From 18110206d68b943f4c0709ebc25abe67b7a00a0b Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 16 Mar 2023 22:02:44 -0700 Subject: [PATCH 001/361] Fallback subtitle decision if transcoding subtitles --- plexpy/pmsconnect.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index ea41e838..7f81ed99 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -2008,7 +2008,7 @@ class PmsConnect(object): 'stream_subtitle_location': helpers.get_xml_attr(subtitle_stream_info, 'location'), 'stream_subtitle_language': helpers.get_xml_attr(subtitle_stream_info, 'language'), 'stream_subtitle_language_code': helpers.get_xml_attr(subtitle_stream_info, 'languageCode'), - 'stream_subtitle_decision': helpers.get_xml_attr(subtitle_stream_info, 'decision'), + 'stream_subtitle_decision': helpers.get_xml_attr(subtitle_stream_info, 'decision') or transcode_details['subtitle_decision'], 'stream_subtitle_transient': int(helpers.get_xml_attr(subtitle_stream_info, 'transient') == '1') } else: From eb7a4fb4bf012d9b5c77d5721be040550619e6ac Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Wed, 22 Mar 2023 16:48:42 -0700 Subject: [PATCH 002/361] Fix calculating media info file sizes --- plexpy/libraries.py | 162 ++++++++++++++++++++----------------------- plexpy/pmsconnect.py | 22 ++++-- plexpy/webserve.py | 3 + 3 files changed, 94 insertions(+), 93 deletions(-) diff --git a/plexpy/libraries.py b/plexpy/libraries.py index 8042378c..ba30efa9 100644 --- a/plexpy/libraries.py +++ b/plexpy/libraries.py @@ -514,39 +514,8 @@ class Libraries(object): watched_list[str(item[group_by])] = {'last_played': item['last_played'], 'play_count': item['play_count']} - cache_time = None - rows = [] # Import media info cache from json file - if rating_key: - try: - inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) - with open(inFilePath, 'r') as inFile: - data = json.load(inFile) - if isinstance(data, dict): - cache_time = data['last_refreshed'] - rows = data['rows'] - else: - rows = data - library_count = len(rows) - except IOError as e: - #logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key) - #logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key) - pass - elif section_id: - try: - inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) - with open(inFilePath, 'r') as inFile: - data = json.load(inFile) - if isinstance(data, dict): - cache_time = data['last_refreshed'] - rows = data['rows'] - else: - rows = data - library_count = len(rows) - except IOError as e: - #logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id) - #logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." 
% section_id) - pass + cache_time, rows, library_count = self._load_media_info_cache(section_id=section_id, rating_key=rating_key) # If no cache was imported, get all library children items cached_items = {d['rating_key']: d['file_size'] for d in rows} if not refresh else {} @@ -606,22 +575,7 @@ class Libraries(object): return default_return # Cache the media info to a json file - cache_time = helpers.timestamp() - - if rating_key: - try: - outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) - with open(outFilePath, 'w') as outFile: - json.dump({'last_refreshed': cache_time, 'rows': rows}, outFile) - except IOError as e: - logger.debug("Tautulli Libraries :: Unable to create cache file for rating_key %s." % rating_key) - elif section_id: - try: - outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) - with open(outFilePath, 'w') as outFile: - json.dump({'last_refreshed': cache_time, 'rows': rows}, outFile) - except IOError as e: - logger.debug("Tautulli Libraries :: Unable to create cache file for section_id %s." % section_id) + self._save_media_info_cache(section_id=section_id, rating_key=rating_key, rows=rows) # Update the last_played and play_count for item in rows: @@ -707,30 +661,15 @@ class Libraries(object): if library_details['section_type'] == 'photo': return False - rows = [] # Import media info cache from json file - if rating_key: - #logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key) - try: - inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) - with open(inFilePath, 'r') as inFile: - rows = json.load(inFile) - except IOError as e: - #logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key) - #logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key) - pass - elif section_id: - logger.debug("Tautulli Libraries :: Getting file sizes for section_id %s." % section_id) - try: - inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) - with open(inFilePath, 'r') as inFile: - rows = json.load(inFile) - except IOError as e: - #logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id) - #logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id) - pass + _, rows, _ = self._load_media_info_cache(section_id=section_id, rating_key=rating_key) # Get the total file size for each item + if rating_key: + logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key) + elif section_id: + logger.debug("Tautulli Libraries :: Getting file sizes for section_id %s." 
% section_id) + pms_connect = pmsconnect.PmsConnect() for item in rows: @@ -738,7 +677,9 @@ class Libraries(object): file_size = 0 metadata = pms_connect.get_metadata_children_details(rating_key=item['rating_key'], - get_children=True) + get_children=True, + media_type=item['media_type'], + section_id=section_id) for child_metadata in metadata: ## TODO: Check list of media info items, currently only grabs first item @@ -754,28 +695,77 @@ class Libraries(object): item['file_size'] = file_size # Cache the media info to a json file - if rating_key: - try: - outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) - with open(outFilePath, 'w') as outFile: - json.dump(rows, outFile) - except IOError as e: - logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key) - elif section_id: - try: - outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) - with open(outFilePath, 'w') as outFile: - json.dump(rows, outFile) - except IOError as e: - logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id) + self._save_media_info_cache(section_id=section_id, rating_key=rating_key, rows=rows) if rating_key: - #logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key) - pass + logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key) elif section_id: logger.debug("Tautulli Libraries :: File sizes updated for section_id %s." % section_id) return True + + def _load_media_info_cache(self, section_id=None, rating_key=None): + cache_time = None + rows = [] + library_count = 0 + + # Import media info cache from json file + if rating_key: + try: + inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) + with open(inFilePath, 'r') as inFile: + data = json.load(inFile) + if isinstance(data, dict): + cache_time = data['last_refreshed'] + rows = data['rows'] + else: + rows = data + library_count = len(rows) + logger.debug("Tautulli Libraries :: Loaded media info from cache for rating_key %s (%s items)." % (rating_key, library_count)) + except IOError as e: + logger.debug("Tautulli Libraries :: No media info cache for rating_key %s." % rating_key) + + elif section_id: + try: + inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) + with open(inFilePath, 'r') as inFile: + data = json.load(inFile) + if isinstance(data, dict): + cache_time = data['last_refreshed'] + rows = data['rows'] + else: + rows = data + library_count = len(rows) + logger.debug("Tautulli Libraries :: Loaded media info from cache for section_id %s (%s items)." % (section_id, library_count)) + except IOError as e: + logger.debug("Tautulli Libraries :: No media info cache for section_id %s." % section_id) + + return cache_time, rows, library_count + + def _save_media_info_cache(self, section_id=None, rating_key=None, rows=None): + cache_time = helpers.timestamp() + + if rows is None: + rows = [] + + if rating_key: + try: + outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) + with open(outFilePath, 'w') as outFile: + json.dump({'last_refreshed': cache_time, 'rows': rows}, outFile) + logger.debug("Tautulli Libraries :: Saved media info cache for rating_key %s." % rating_key) + except IOError as e: + logger.debug("Tautulli Libraries :: Unable to create cache file for rating_key %s." 
% rating_key) + + elif section_id: + try: + outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) + with open(outFilePath, 'w') as outFile: + json.dump({'last_refreshed': cache_time, 'rows': rows}, outFile) + logger.debug("Tautulli Libraries :: Saved media info cache for section_id %s." % section_id) + except IOError as e: + logger.debug("Tautulli Libraries :: Unable to create cache file for section_id %s." % section_id) + def set_config(self, section_id=None, custom_thumb='', custom_art='', do_notify=1, keep_history=1, do_notify_created=1): if section_id: diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index 7f81ed99..cf662be4 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -1617,7 +1617,7 @@ class PmsConnect(object): else: return metadata - def get_metadata_children_details(self, rating_key='', get_children=False): + def get_metadata_children_details(self, rating_key='', get_children=False, media_type=None, section_id=None): """ Return processed and validated metadata list for all children of requested item. @@ -1625,13 +1625,21 @@ class PmsConnect(object): Output: array """ - metadata = self.get_metadata_children(str(rating_key), output_format='xml') + if media_type == 'artist': + sort_type = '&artist.id={}&type=9'.format(rating_key) + xml_head = self.fetch_library_list( + section_id=str(section_id), + sort_type=sort_type, + output_format='xml' + ) + else: + metadata = self.get_metadata_children(str(rating_key), output_format='xml') - try: - xml_head = metadata.getElementsByTagName('MediaContainer') - except Exception as e: - logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_metadata_children: %s." % e) - return [] + try: + xml_head = metadata.getElementsByTagName('MediaContainer') + except Exception as e: + logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_metadata_children: %s." 
% e) + return [] metadata_list = [] diff --git a/plexpy/webserve.py b/plexpy/webserve.py index c71addb1..7234d9d9 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -931,6 +931,9 @@ class WebInterface(object): section_ids = set(get_file_sizes_hold['section_ids']) rating_keys = set(get_file_sizes_hold['rating_keys']) + section_id = helpers.cast_to_int(section_id) + rating_key = helpers.cast_to_int(rating_key) + if (section_id and section_id not in section_ids) or (rating_key and rating_key not in rating_keys): if section_id: section_ids.add(section_id) From fa8b51bfd9af683c87f61710c6bf420c8ff5a6fe Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 13 Apr 2023 14:46:30 -0700 Subject: [PATCH 003/361] Add .vscode to .gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 68d5abaf..1e54132b 100644 --- a/.gitignore +++ b/.gitignore @@ -53,6 +53,9 @@ Thumbs.db #Ignore files generated by PyCharm *.idea/* +#Ignore files generated by VSCode +*.vscode/* + #Ignore files generated by vi *.swp From 2921c1fc305483cf0d016112d7f88fd39e3f141b Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 13 Apr 2023 14:48:19 -0700 Subject: [PATCH 004/361] Fix live tv thumb and art for top libraries statistics card --- data/interfaces/default/home_stats.html | 5 +++-- data/interfaces/default/index.html | 2 +- plexpy/datafactory.py | 2 ++ 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/data/interfaces/default/home_stats.html b/data/interfaces/default/home_stats.html index df2b7b69..08f08b5a 100644 --- a/data/interfaces/default/home_stats.html +++ b/data/interfaces/default/home_stats.html @@ -77,7 +77,8 @@ DOCUMENTATION :: END <% fallback = 'art-live' if row0['live'] else 'art' %>
% elif stat_id == 'top_libraries': -
+ <% fallback = 'art-live' if row0['live'] else row0['library_art'] %> +
% elif stat_id == 'top_users':
% elif stat_id == 'top_platforms': @@ -110,7 +111,7 @@ DOCUMENTATION :: END
% elif stat_id == 'top_libraries': % if row0['thumb'].startswith('http'): - + % else: % endif diff --git a/data/interfaces/default/index.html b/data/interfaces/default/index.html index 88d973da..7ad9da3b 100644 --- a/data/interfaces/default/index.html +++ b/data/interfaces/default/index.html @@ -809,7 +809,7 @@ if (stat_id === 'most_concurrent') { return } else if (stat_id === 'top_libraries') { - $('#stats-background-' + stat_id).css('background-image', 'url(' + page('pms_image_proxy', art || library_art, null, 500, 280, 40, '282828', 3, library_art || fallback_art) + ')'); + $('#stats-background-' + stat_id).css('background-image', 'url(' + page('pms_image_proxy', art || library_art, null, 500, 280, 40, '282828', 3, fallback_art) + ')'); $('#stats-thumb-' + stat_id).removeClass(function (index, className) { return (className.match (/(^|\s)svg-icon library-\S+/g) || []).join(' ')}); if (thumb.startsWith('http')) { diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index 8eb1d39c..62a4d9de 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -743,6 +743,8 @@ class DataFactory(object): if item['custom_art'] and item['custom_art'] != item['library_art']: library_art = item['custom_art'] + elif item['library_art'] == common.DEFAULT_LIVE_TV_ART_FULL: + library_art = common.DEFAULT_LIVE_TV_ART else: library_art = item['library_art'] From b0921b5f4a7faf61882586f557ee51185fda6af5 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 13 Apr 2023 15:17:22 -0700 Subject: [PATCH 005/361] Fix history table sorting by play duration --- data/interfaces/default/history.html | 2 +- data/interfaces/default/history_table_modal.html | 2 +- data/interfaces/default/info.html | 2 +- data/interfaces/default/js/tables/history_table.js | 4 ++-- data/interfaces/default/library.html | 2 +- data/interfaces/default/user.html | 2 +- plexpy/datafactory.py | 3 ++- 7 files changed, 9 insertions(+), 8 deletions(-) diff --git a/data/interfaces/default/history.html b/data/interfaces/default/history.html index cf15c87f..327b99b7 100644 --- a/data/interfaces/default/history.html +++ b/data/interfaces/default/history.html @@ -84,7 +84,7 @@ Started Paused Stopped - Duration + Duration diff --git a/data/interfaces/default/history_table_modal.html b/data/interfaces/default/history_table_modal.html index 8ed42bf5..b4066627 100644 --- a/data/interfaces/default/history_table_modal.html +++ b/data/interfaces/default/history_table_modal.html @@ -32,7 +32,7 @@ Started Paused Stopped - Duration + Duration diff --git a/data/interfaces/default/info.html b/data/interfaces/default/info.html index 43e57694..87c8f9d8 100644 --- a/data/interfaces/default/info.html +++ b/data/interfaces/default/info.html @@ -692,7 +692,7 @@ DOCUMENTATION :: END Started Paused Stopped - Duration + Duration diff --git a/data/interfaces/default/js/tables/history_table.js b/data/interfaces/default/js/tables/history_table.js index deefa067..d209f90e 100644 --- a/data/interfaces/default/js/tables/history_table.js +++ b/data/interfaces/default/js/tables/history_table.js @@ -247,7 +247,7 @@ history_table_options = { }, { "targets": [11], - "data": "duration", + "data": "play_duration", "render": function (data, type, full) { if (data !== null) { return Math.round(moment.duration(data, 'seconds').as('minutes')) + ' mins'; @@ -529,7 +529,7 @@ function childTableFormat(rowData) { 'Started' + 'Paused' + 'Stopped' + - 'Duration' + + 'Duration' + '' + '' + '' + diff --git 
a/data/interfaces/default/library.html b/data/interfaces/default/library.html index 59de6497..b1fe8b6f 100644 --- a/data/interfaces/default/library.html +++ b/data/interfaces/default/library.html @@ -248,7 +248,7 @@ DOCUMENTATION :: END Started Paused Stopped - Duration + Duration diff --git a/data/interfaces/default/user.html b/data/interfaces/default/user.html index 501c68ca..61dfdcb5 100644 --- a/data/interfaces/default/user.html +++ b/data/interfaces/default/user.html @@ -212,7 +212,7 @@ DOCUMENTATION :: END Started Paused Stopped - Duration + Duration diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index 62a4d9de..e2ee0c2b 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -308,7 +308,8 @@ class DataFactory(object): 'date': item['date'], 'started': item['started'], 'stopped': item['stopped'], - 'duration': item['play_duration'], + 'duration': item['play_duration'], # Keep for backwards compatibility + 'play_duration': item['play_duration'], 'paused_counter': item['paused_counter'], 'user_id': item['user_id'], 'user': item['user'], From 07715c6a493d983852cf4aab27dd6391005ac816 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 13 Apr 2023 15:17:41 -0700 Subject: [PATCH 006/361] Rename API get_history response duration to play_duration --- plexpy/webserve.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 7234d9d9..57921b61 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -1920,7 +1920,6 @@ class WebInterface(object): "filter_duration": "10 hrs 12 mins", "data": [{"date": 1462687607, - "duration": 263, "friendly_name": "Mother of Dragons", "full_title": "Game of Thrones - The Red Woman", "grandparent_rating_key": 351, @@ -1942,6 +1941,7 @@ class WebInterface(object): "paused_counter": 0, "percent_complete": 84, "platform": "Windows", + "play_duration": 263, "product": "Plex for Windows", "player": "Castle-PC", "rating_key": 4348, From 14648a46045688f27d165529158539d1ea1431a5 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 13 Apr 2023 21:48:31 -0700 Subject: [PATCH 007/361] Fix live tv thumb hover on top libraries statistics card --- data/interfaces/default/home_stats.html | 7 ++++--- data/interfaces/default/index.html | 5 +++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/data/interfaces/default/home_stats.html b/data/interfaces/default/home_stats.html index 08f08b5a..d66dd1d4 100644 --- a/data/interfaces/default/home_stats.html +++ b/data/interfaces/default/home_stats.html @@ -110,8 +110,8 @@ DOCUMENTATION :: END
% elif stat_id == 'top_libraries': - % if row0['thumb'].startswith('http'): - + % if row0['library_thumb'].startswith('http'): + % else: % endif @@ -148,7 +148,8 @@ DOCUMENTATION :: END data-rating_key="${row.get('rating_key')}" data-grandparent_rating_key="${row.get('grandparent_rating_key')}" data-guid="${row.get('guid')}" data-title="${row.get('title')}" data-art="${row.get('art')}" data-thumb="${row.get('thumb')}" data-platform="${row.get('platform_name')}" data-library-type="${row.get('section_type')}" data-user_id="${row.get('user_id')}" data-user="${row.get('user')}" data-friendly_name="${row.get('friendly_name')}" data-user_thumb="${row.get('user_thumb')}" - data-last_watch="${row.get('last_watch')}" data-started="${row.get('started')}" data-live="${row.get('live')}" data-library_art="${row.get('library_art', '')}"> + data-last_watch="${row.get('last_watch')}" data-started="${row.get('started')}" data-live="${row.get('live')}" + data-library_art="${row.get('library_art', '')}" data-library_thumb="${row.get('library_thumb', '')}">
${loop.index + 1}
% if stat_id in ('top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music', 'popular_music', 'last_watched'): diff --git a/data/interfaces/default/index.html b/data/interfaces/default/index.html index 7ad9da3b..57236e69 100644 --- a/data/interfaces/default/index.html +++ b/data/interfaces/default/index.html @@ -798,6 +798,7 @@ var guid = $(elem).data('guid'); var live = $(elem).data('live'); var library_art = $(elem).data('library_art'); + var library_thumb = $(elem).data('library_thumb'); var [height, fallback_poster, fallback_art] = [450, 'poster', 'art']; if ($.inArray(stat_id, ['top_music', 'popular_music']) > -1) { [height, fallback_poster, fallback_art] = [300, 'cover', 'art']; @@ -812,8 +813,8 @@ $('#stats-background-' + stat_id).css('background-image', 'url(' + page('pms_image_proxy', art || library_art, null, 500, 280, 40, '282828', 3, fallback_art) + ')'); $('#stats-thumb-' + stat_id).removeClass(function (index, className) { return (className.match (/(^|\s)svg-icon library-\S+/g) || []).join(' ')}); - if (thumb.startsWith('http')) { - $('#stats-thumb-' + stat_id).css('background-image', 'url(' + page('pms_image_proxy', thumb, null, 300, 300, null, null, null, 'cover') + ')'); + if (library_thumb.startsWith('http')) { + $('#stats-thumb-' + stat_id).css('background-image', 'url(' + page('pms_image_proxy', library_thumb, null, 100, 100, null, null, null, 'cover') + ')'); } else { $('#stats-thumb-' + stat_id).css('background-image', '') .addClass('svg-icon library-' + library_type); From 99afb7392b833d754c5efaa1188552d51b2cc391 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 14 Apr 2023 11:29:53 -0700 Subject: [PATCH 008/361] Use separate log file for script PlexAPI --- plexpy/notifiers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index ecd33a60..c7580603 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -3431,7 +3431,8 @@ class SCRIPTS(Notifier): 'TAUTULLI_PUBLIC_URL': plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT, 'TAUTULLI_APIKEY': plexpy.CONFIG.API_KEY, 'TAUTULLI_ENCODING': plexpy.SYS_ENCODING, - 'TAUTULLI_PYTHON_VERSION': common.PYTHON_VERSION + 'TAUTULLI_PYTHON_VERSION': common.PYTHON_VERSION, + 'PLEXAPI_LOG_PATH': os.path.join(plexpy.CONFIG.LOG_DIR, 'plexapi_script.log') } if user_id: From e9b1db139e96f0d82ff7d047f3a91d5b45a69f45 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 14 Apr 2023 11:50:55 -0700 Subject: [PATCH 009/361] v2.12.3 --- CHANGELOG.md | 15 +++++++++++++++ plexpy/version.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c8463420..b3c6c4a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## v2.12.3 (2023-04-14) + +* Activity: + * Fix: Incorrect subtitle decision shown when subtitles are transcoded. +* History: + * Fix: Incorrect order when sorting by the duration column in the history tables. +* Notifications: + * Fix: Logging error when running scripts that use PlexAPI. +* UI: + * Fix: Calculate file sizes setting causing the media info table to fail to load. + * Fix: Incorrect artwork and thumbnail shown for Live TV on the Most Active Libraries statistics card. +* API: + * Change: Renamed duration to play_duration in the get_history API response. (Note: duration kept for backwards compatibility.) 
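For reference, a minimal sketch of reading the renamed field from the get_history API command; the host, port, and API key below are placeholder assumptions, not part of this patch series:

    import requests

    params = {
        'apikey': 'YOUR_API_KEY',  # placeholder API key
        'cmd': 'get_history',
        'length': 1,
    }
    r = requests.get('http://localhost:8181/api/v2', params=params)  # assumed local Tautulli instance
    row = r.json()['response']['data']['data'][0]
    # 'play_duration' is the new key; 'duration' is kept for backwards compatibility.
    print(row['play_duration'], row['duration'])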
+ + ## v2.12.2 (2023-03-16) * Other: diff --git a/plexpy/version.py b/plexpy/version.py index 7180028b..47ba56cb 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.12.2" \ No newline at end of file +PLEXPY_RELEASE_VERSION = "v2.12.3" \ No newline at end of file From 3b3c59c4bb8e58d76df01348151d869ee7ca1105 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 17 Apr 2023 12:50:52 -0700 Subject: [PATCH 010/361] Set view offset equal to duration if stopped within the last 10 sec * Plex reports the view offset every 10 seconds, so the view offset at the end of stream can be short by up to 10 seconds. --- plexpy/activity_handler.py | 9 +++++++-- plexpy/notification_handler.py | 24 +++++++++++++++--------- 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py index 851372d5..7bfbe8fb 100644 --- a/plexpy/activity_handler.py +++ b/plexpy/activity_handler.py @@ -108,7 +108,6 @@ class ActivityHandler(object): self.ap.write_session(session=self.session, notify=notify) self.set_session_state() - self.get_db_session() def set_session_state(self, view_offset=None): self.ap.set_session_state( @@ -117,6 +116,7 @@ class ActivityHandler(object): view_offset=view_offset or self.view_offset, stopped=helpers.timestamp() ) + self.get_db_session() def put_notification(self, notify_action, **kwargs): notification = {'stream_data': self.db_session.copy(), 'notify_action': notify_action} @@ -162,7 +162,12 @@ class ActivityHandler(object): # Update the session state and viewOffset # Set force_stop to true to disable the state set if not force_stop: - self.set_session_state() + # Set the view offset equal to the duration if it is within the last 10 seconds + if self.db_session['duration'] - self.view_offset <= 10000: + view_offset = self.db_session['duration'] + else: + view_offset = self.view_offset + self.set_session_state(view_offset=view_offset) # Write it to the history table row_id = self.ap.write_session_history(session=self.db_session) diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 86baac4d..bc8d39c5 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -550,7 +550,13 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m if session: # Reload json from raw stream info if session.get('raw_stream_info'): - session.update(json.loads(session['raw_stream_info'])) + raw_stream_info = json.loads(session['raw_stream_info']) + # Don't overwrite id, session_key, stopped, view_offset + raw_stream_info.pop('id', None) + raw_stream_info.pop('session_key', None) + raw_stream_info.pop('stopped', None) + raw_stream_info.pop('view_offset', None) + session.update(raw_stream_info) notify_params.update(session) if timeline: @@ -638,13 +644,13 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m stream_duration_sec = 0 stream_duration = 0 - view_offset_sec = helpers.convert_milliseconds_to_seconds(session.get('view_offset', 0)) + progress_duration_sec = helpers.convert_milliseconds_to_seconds(session.get('view_offset', 0)) duration_sec = helpers.convert_milliseconds_to_seconds(notify_params['duration']) - remaining_duration_sec = duration_sec - view_offset_sec + remaining_duration_sec = duration_sec - progress_duration_sec - view_offset = 
helpers.seconds_to_minutes(view_offset_sec) + progress_duration = helpers.seconds_to_minutes(progress_duration_sec) duration = helpers.seconds_to_minutes(duration_sec) - remaining_duration = duration - view_offset + remaining_duration = duration - progress_duration # Build Plex URL if notify_params['media_type'] == 'track': @@ -1005,10 +1011,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m 'remaining_duration': remaining_duration, 'remaining_duration_sec': remaining_duration_sec, 'remaining_time': arrow.get(remaining_duration_sec).format(duration_format), - 'progress_duration': view_offset, - 'progress_duration_sec': view_offset_sec, - 'progress_time': arrow.get(view_offset_sec).format(duration_format), - 'progress_percent': helpers.get_percent(view_offset_sec, duration_sec), + 'progress_duration': progress_duration, + 'progress_duration_sec': progress_duration_sec, + 'progress_time': arrow.get(progress_duration_sec).format(duration_format), + 'progress_percent': helpers.get_percent(progress_duration_sec, duration_sec), 'view_offset': session.get('view_offset', 0), 'initial_stream': notify_params['initial_stream'], 'transcode_decision': transcode_decision, From fe4fba353e600d45ec30d381f60b68afdc67f8b5 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 20 Apr 2023 08:50:41 -0700 Subject: [PATCH 011/361] Catch KeyError on import db version --- plexpy/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plexpy/database.py b/plexpy/database.py index 3b56f33b..72ce7782 100644 --- a/plexpy/database.py +++ b/plexpy/database.py @@ -98,7 +98,7 @@ def import_tautulli_db(database=None, method=None, backup=False): try: version_info = db.select_single('SELECT * FROM import_db.version_info WHERE key = "version"') import_db_version = version_info['value'] - except sqlite3.OperationalError: + except (sqlite3.OperationalError, KeyError): import_db_version = 'v2.6.10' logger.info("Tautulli Database :: Import Tautulli database version: %s", import_db_version) From 3a1d6322aebfe63bb0160b4c85b3ecc785ce2cba Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 5 May 2023 15:56:32 -0700 Subject: [PATCH 012/361] Add return ID for async API calls * export_id, notification_id, and newsletter_notification_id --- plexpy/api2.py | 18 ++++++++++-------- plexpy/exporter.py | 2 +- plexpy/notification_handler.py | 2 +- plexpy/webserve.py | 8 +++----- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/plexpy/api2.py b/plexpy/api2.py index 4d9efbdc..eefaa9fd 100644 --- a/plexpy/api2.py +++ b/plexpy/api2.py @@ -505,7 +505,8 @@ class API2(object): script_args (str): The arguments for script notifications Returns: - None + json: + {"notification_id": 1} ``` """ if not notifier_id: @@ -527,14 +528,14 @@ class API2(object): body=body, **kwargs) - if success: + if isinstance(success, int): self._api_msg = 'Notification sent.' self._api_result_type = 'success' + return {'notification_id': success} else: self._api_msg = 'Notification failed.' self._api_result_type = 'error' - - return + return def notify_newsletter(self, newsletter_id='', subject='', body='', message='', **kwargs): """ Send a newsletter using Tautulli. 
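For reference, a minimal sketch of calling the updated notify command over Tautulli's HTTP API and reading back the new return ID; the host, port, notifier ID, and API key are placeholder assumptions, not part of this patch:

    import requests

    params = {
        'apikey': 'YOUR_API_KEY',      # placeholder API key
        'cmd': 'notify',
        'notifier_id': 1,              # assumed existing notification agent ID
        'subject': 'Tautulli test',
        'body': 'Test notification body',
    }
    r = requests.get('http://localhost:8181/api/v2', params=params)  # assumed local Tautulli instance
    response = r.json()['response']
    # With this patch the data payload carries the ID, e.g. {"notification_id": 1}.
    print(response['result'], response['data'])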
@@ -549,7 +550,8 @@ class API2(object): message (str): The message of the newsletter Returns: - None + json: + {"newsletter_notification_id": 1} ``` """ if not newsletter_id: @@ -572,14 +574,14 @@ class API2(object): message=message, **kwargs) - if success: + if isinstance(success, int): self._api_msg = 'Newsletter sent.' self._api_result_type = 'success' + return {'newsletter_notification_id': success} else: self._api_msg = 'Newsletter failed.' self._api_result_type = 'error' - - return + return def _api_make_md(self): """ Tries to make a API.md to simplify the api docs. """ diff --git a/plexpy/exporter.py b/plexpy/exporter.py index 9bcb5d06..908aced7 100644 --- a/plexpy/exporter.py +++ b/plexpy/exporter.py @@ -1837,7 +1837,7 @@ class Export(object): threading.Thread(target=self._real_export).start() - return True + return self.export_id def add_export(self): keys = { diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index bc8d39c5..2171d2bd 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -438,7 +438,7 @@ def notify(notifier_id=None, notify_action=None, stream_data=None, timeline_data if success: set_notify_success(notification_id) - return True + return notification_id def get_notify_state(session): diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 57921b61..88b65174 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -7057,9 +7057,7 @@ class WebInterface(object): Returns: json: - {"result": "success", - "message": "Metadata export has started." - } + {"export_id": 1} ``` """ individual_files = helpers.bool_true(individual_files) @@ -7075,8 +7073,8 @@ class WebInterface(object): export_type=export_type, individual_files=individual_files).export() - if result is True: - return {'result': 'success', 'message': 'Metadata export has started.'} + if isinstance(result, int): + return {'result': 'success', 'message': 'Metadata export has started.', 'export_id': result} else: return {'result': 'error', 'message': result} From f39b9f908701feed62a22c30fdd558f5b3b3c979 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 15 May 2023 11:03:26 -0700 Subject: [PATCH 013/361] Fix SQLite Double-Quoted Strings (#2057) * Fix __init__.py * Fix activity_pinger.py * Fix activity_processor.py * Fix database.py * Fix datafactory.py * Fix exporter.py * Fix graphs.py * Fix libraries.py * Fix mobile_app.py * Fix newsletter_handler.py * Fix newsletters.py * Fix notification_handler.py * Fix notifiers.py * Fix plexivity_import.py * Fix plexwatch_import.py * Fix users.py * Fix webauth.py --- plexpy/__init__.py | 1519 ++++++++++++++++---------------- plexpy/activity_pinger.py | 30 +- plexpy/activity_processor.py | 78 +- plexpy/database.py | 74 +- plexpy/datafactory.py | 1104 +++++++++++------------ plexpy/exporter.py | 52 +- plexpy/graphs.py | 810 ++++++++--------- plexpy/libraries.py | 218 ++--- plexpy/mobile_app.py | 24 +- plexpy/newsletter_handler.py | 18 +- plexpy/newsletters.py | 18 +- plexpy/notification_handler.py | 54 +- plexpy/notifiers.py | 14 +- plexpy/plexivity_import.py | 48 +- plexpy/plexwatch_import.py | 52 +- plexpy/users.py | 310 +++---- plexpy/webauth.py | 14 +- 17 files changed, 2220 insertions(+), 2217 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 2a769374..eb1f73c2 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -242,6 +242,9 @@ def initialize(config_file): logger.info("Python {}".format( sys.version.replace('\n', '') )) + 
logger.info("SQLite {}".format( + sqlite3.sqlite_version + )) logger.info("Program Dir: {}".format( PROG_DIR @@ -618,1646 +621,1646 @@ def dbcheck(): # schema table :: This is a table which keeps track of the database version c_db.execute( - 'CREATE TABLE IF NOT EXISTS version_info (key TEXT UNIQUE, value TEXT)' + "CREATE TABLE IF NOT EXISTS version_info (key TEXT UNIQUE, value TEXT)" ) # sessions table :: This is a temp table that logs currently active sessions c_db.execute( - 'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, session_key INTEGER, session_id TEXT, ' - 'transcode_key TEXT, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, stopped INTEGER, ' - 'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, ' - 'ip_address TEXT, machine_id TEXT, bandwidth INTEGER, location TEXT, player TEXT, product TEXT, platform TEXT, ' - 'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, ' - 'media_index INTEGER, parent_media_index INTEGER, ' - 'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, year INTEGER, ' - 'parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'originally_available_at TEXT, added_at INTEGER, guid TEXT, ' - 'view_offset INTEGER DEFAULT 0, duration INTEGER, video_decision TEXT, audio_decision TEXT, ' - 'transcode_decision TEXT, container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, ' - 'video_codec TEXT, video_bitrate INTEGER, video_resolution TEXT, video_width INTEGER, video_height INTEGER, ' - 'video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, ' - 'video_dynamic_range TEXT, aspect_ratio TEXT, ' - 'audio_codec TEXT, audio_bitrate INTEGER, audio_channels INTEGER, audio_language TEXT, audio_language_code TEXT, ' - 'subtitle_codec TEXT, subtitle_forced INTEGER, subtitle_language TEXT, ' - 'stream_bitrate INTEGER, stream_video_resolution TEXT, quality_profile TEXT, ' - 'stream_container_decision TEXT, stream_container TEXT, ' - 'stream_video_decision TEXT, stream_video_codec TEXT, stream_video_bitrate INTEGER, stream_video_width INTEGER, ' - 'stream_video_height INTEGER, stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, ' - 'stream_video_dynamic_range TEXT, ' - 'stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, ' - 'stream_audio_language TEXT, stream_audio_language_code TEXT, ' - 'subtitles INTEGER, stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, ' - 'stream_subtitle_forced INTEGER, stream_subtitle_language TEXT, ' - 'transcode_protocol TEXT, transcode_container TEXT, ' - 'transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER,' - 'transcode_width INTEGER, transcode_height INTEGER, ' - 'transcode_hw_decoding INTEGER, transcode_hw_encoding INTEGER, ' - 'optimized_version INTEGER, optimized_version_profile TEXT, optimized_version_title TEXT, ' - 'synced_version INTEGER, synced_version_profile TEXT, ' - 'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, ' - 'secure INTEGER, relayed INTEGER, ' - 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, ' - 'intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, commercial INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, ' - 'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, ' - 
'rating_key_websocket TEXT)' + "CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, session_key INTEGER, session_id TEXT, " + "transcode_key TEXT, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, stopped INTEGER, " + "paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, " + "ip_address TEXT, machine_id TEXT, bandwidth INTEGER, location TEXT, player TEXT, product TEXT, platform TEXT, " + "title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, " + "media_index INTEGER, parent_media_index INTEGER, " + "thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, year INTEGER, " + "parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "originally_available_at TEXT, added_at INTEGER, guid TEXT, " + "view_offset INTEGER DEFAULT 0, duration INTEGER, video_decision TEXT, audio_decision TEXT, " + "transcode_decision TEXT, container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, " + "video_codec TEXT, video_bitrate INTEGER, video_resolution TEXT, video_width INTEGER, video_height INTEGER, " + "video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, " + "video_dynamic_range TEXT, aspect_ratio TEXT, " + "audio_codec TEXT, audio_bitrate INTEGER, audio_channels INTEGER, audio_language TEXT, audio_language_code TEXT, " + "subtitle_codec TEXT, subtitle_forced INTEGER, subtitle_language TEXT, " + "stream_bitrate INTEGER, stream_video_resolution TEXT, quality_profile TEXT, " + "stream_container_decision TEXT, stream_container TEXT, " + "stream_video_decision TEXT, stream_video_codec TEXT, stream_video_bitrate INTEGER, stream_video_width INTEGER, " + "stream_video_height INTEGER, stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, " + "stream_video_dynamic_range TEXT, " + "stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, " + "stream_audio_language TEXT, stream_audio_language_code TEXT, " + "subtitles INTEGER, stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, " + "stream_subtitle_forced INTEGER, stream_subtitle_language TEXT, " + "transcode_protocol TEXT, transcode_container TEXT, " + "transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER," + "transcode_width INTEGER, transcode_height INTEGER, " + "transcode_hw_decoding INTEGER, transcode_hw_encoding INTEGER, " + "optimized_version INTEGER, optimized_version_profile TEXT, optimized_version_title TEXT, " + "synced_version INTEGER, synced_version_profile TEXT, " + "live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, " + "secure INTEGER, relayed INTEGER, " + "buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, " + "intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, commercial INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, " + "initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, " + "rating_key_websocket TEXT)" ) # sessions_continued table :: This is a temp table that keeps track of continued streaming sessions c_db.execute( - 'CREATE TABLE IF NOT EXISTS sessions_continued (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'user_id INTEGER, machine_id TEXT, media_type TEXT, stopped INTEGER)' + "CREATE TABLE IF NOT EXISTS sessions_continued (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "user_id INTEGER, machine_id TEXT, media_type TEXT, 
stopped INTEGER)" ) # session_history table :: This is a history table which logs essential stream details c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, reference_id INTEGER, ' - 'started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, ' - 'ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, product TEXT, product_version TEXT, ' - 'platform TEXT, platform_version TEXT, profile TEXT, machine_id TEXT, ' - 'bandwidth INTEGER, location TEXT, quality_profile TEXT, secure INTEGER, relayed INTEGER, ' - 'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, section_id INTEGER, ' - 'view_offset INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, reference_id INTEGER, " + "started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, " + "ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, product TEXT, product_version TEXT, " + "platform TEXT, platform_version TEXT, profile TEXT, machine_id TEXT, " + "bandwidth INTEGER, location TEXT, quality_profile TEXT, secure INTEGER, relayed INTEGER, " + "parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, section_id INTEGER, " + "view_offset INTEGER DEFAULT 0)" ) - # session_history_media_info table :: This is a table which logs each session's media info + # session_history_media_info table :: This is a table which logs each session"s media info c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history_media_info (id INTEGER PRIMARY KEY, rating_key INTEGER, ' - 'video_decision TEXT, audio_decision TEXT, transcode_decision TEXT, duration INTEGER DEFAULT 0, ' - 'container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, video_bitrate INTEGER, video_bit_depth INTEGER, ' - 'video_codec TEXT, video_codec_level TEXT, video_width INTEGER, video_height INTEGER, video_resolution TEXT, ' - 'video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, video_dynamic_range TEXT, aspect_ratio TEXT, ' - 'audio_bitrate INTEGER, audio_codec TEXT, audio_channels INTEGER, audio_language TEXT, audio_language_code TEXT, ' - 'subtitles INTEGER, subtitle_codec TEXT, subtitle_forced, subtitle_language TEXT,' - 'transcode_protocol TEXT, transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, ' - 'transcode_audio_channels INTEGER, transcode_width INTEGER, transcode_height INTEGER, ' - 'transcode_hw_requested INTEGER, transcode_hw_full_pipeline INTEGER, ' - 'transcode_hw_decode TEXT, transcode_hw_decode_title TEXT, transcode_hw_decoding INTEGER, ' - 'transcode_hw_encode TEXT, transcode_hw_encode_title TEXT, transcode_hw_encoding INTEGER, ' - 'stream_container TEXT, stream_container_decision TEXT, stream_bitrate INTEGER, ' - 'stream_video_decision TEXT, stream_video_bitrate INTEGER, stream_video_codec TEXT, stream_video_codec_level TEXT, ' - 'stream_video_bit_depth INTEGER, stream_video_height INTEGER, stream_video_width INTEGER, stream_video_resolution TEXT, ' - 'stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, stream_video_dynamic_range TEXT, ' - 'stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, ' - 'stream_audio_language TEXT, stream_audio_language_code TEXT, ' - 'stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, ' - 'stream_subtitle_container TEXT, stream_subtitle_forced INTEGER, stream_subtitle_language 
TEXT, ' - 'synced_version INTEGER, synced_version_profile TEXT, ' - 'optimized_version INTEGER, optimized_version_profile TEXT, optimized_version_title TEXT)' + "CREATE TABLE IF NOT EXISTS session_history_media_info (id INTEGER PRIMARY KEY, rating_key INTEGER, " + "video_decision TEXT, audio_decision TEXT, transcode_decision TEXT, duration INTEGER DEFAULT 0, " + "container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, video_bitrate INTEGER, video_bit_depth INTEGER, " + "video_codec TEXT, video_codec_level TEXT, video_width INTEGER, video_height INTEGER, video_resolution TEXT, " + "video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, video_dynamic_range TEXT, aspect_ratio TEXT, " + "audio_bitrate INTEGER, audio_codec TEXT, audio_channels INTEGER, audio_language TEXT, audio_language_code TEXT, " + "subtitles INTEGER, subtitle_codec TEXT, subtitle_forced, subtitle_language TEXT," + "transcode_protocol TEXT, transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, " + "transcode_audio_channels INTEGER, transcode_width INTEGER, transcode_height INTEGER, " + "transcode_hw_requested INTEGER, transcode_hw_full_pipeline INTEGER, " + "transcode_hw_decode TEXT, transcode_hw_decode_title TEXT, transcode_hw_decoding INTEGER, " + "transcode_hw_encode TEXT, transcode_hw_encode_title TEXT, transcode_hw_encoding INTEGER, " + "stream_container TEXT, stream_container_decision TEXT, stream_bitrate INTEGER, " + "stream_video_decision TEXT, stream_video_bitrate INTEGER, stream_video_codec TEXT, stream_video_codec_level TEXT, " + "stream_video_bit_depth INTEGER, stream_video_height INTEGER, stream_video_width INTEGER, stream_video_resolution TEXT, " + "stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, stream_video_dynamic_range TEXT, " + "stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, " + "stream_audio_language TEXT, stream_audio_language_code TEXT, " + "stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, " + "stream_subtitle_container TEXT, stream_subtitle_forced INTEGER, stream_subtitle_language TEXT, " + "synced_version INTEGER, synced_version_profile TEXT, " + "optimized_version INTEGER, optimized_version_profile TEXT, optimized_version_title TEXT)" ) - # session_history_metadata table :: This is a table which logs each session's media metadata + # session_history_metadata table :: This is a table which logs each session"s media metadata c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history_metadata (id INTEGER PRIMARY KEY, ' - 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, ' - 'media_index INTEGER, parent_media_index INTEGER, ' - 'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, ' - 'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, ' - 'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, ' - 'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, ' - 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, ' - 'marker_credits_first INTEGER DEFAULT NULL, marker_credits_final INTEGER DEFAULT NULL)' + "CREATE TABLE IF NOT EXISTS session_history_metadata (id INTEGER PRIMARY KEY, " + "rating_key INTEGER, 
parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, " + "media_index INTEGER, parent_media_index INTEGER, " + "thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, " + "art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, " + "last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, " + "duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, " + "labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, " + "marker_credits_first INTEGER DEFAULT NULL, marker_credits_final INTEGER DEFAULT NULL)" ) # users table :: This table keeps record of the friends list c_db.execute( - 'CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, ' - 'thumb TEXT, custom_avatar_url TEXT, title TEXT, email TEXT, ' - 'is_active INTEGER DEFAULT 1, is_admin INTEGER DEFAULT 0, is_home_user INTEGER DEFAULT NULL, ' - 'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, ' - 'do_notify INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0, ' - 'allow_guest INTEGER DEFAULT 0, user_token TEXT, server_token TEXT, shared_libraries TEXT, ' - 'filter_all TEXT, filter_movies TEXT, filter_tv TEXT, filter_music TEXT, filter_photos TEXT)' + "CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, " + "thumb TEXT, custom_avatar_url TEXT, title TEXT, email TEXT, " + "is_active INTEGER DEFAULT 1, is_admin INTEGER DEFAULT 0, is_home_user INTEGER DEFAULT NULL, " + "is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, " + "do_notify INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0, " + "allow_guest INTEGER DEFAULT 0, user_token TEXT, server_token TEXT, shared_libraries TEXT, " + "filter_all TEXT, filter_movies TEXT, filter_tv TEXT, filter_music TEXT, filter_photos TEXT)" ) # library_sections table :: This table keeps record of the servers library sections c_db.execute( - 'CREATE TABLE IF NOT EXISTS library_sections (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, agent TEXT, ' - 'thumb TEXT, custom_thumb_url TEXT, art TEXT, custom_art_url TEXT, ' - 'count INTEGER, parent_count INTEGER, child_count INTEGER, is_active INTEGER DEFAULT 1, ' - 'do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, ' - 'deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))' + "CREATE TABLE IF NOT EXISTS library_sections (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, agent TEXT, " + "thumb TEXT, custom_thumb_url TEXT, art TEXT, custom_art_url TEXT, " + "count INTEGER, parent_count INTEGER, child_count INTEGER, is_active INTEGER DEFAULT 1, " + "do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, " + "deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))" ) # user_login table :: This table keeps record of the Tautulli guest logins c_db.execute( - 'CREATE TABLE IF NOT EXISTS user_login (id INTEGER PRIMARY KEY AUTOINCREMENT, ' 
- 'timestamp INTEGER, user_id INTEGER, user TEXT, user_group TEXT, ' - 'ip_address TEXT, host TEXT, user_agent TEXT, success INTEGER DEFAULT 1,' - 'expiry TEXT, jwt_token TEXT)' + "CREATE TABLE IF NOT EXISTS user_login (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "timestamp INTEGER, user_id INTEGER, user TEXT, user_group TEXT, " + "ip_address TEXT, host TEXT, user_agent TEXT, success INTEGER DEFAULT 1," + "expiry TEXT, jwt_token TEXT)" ) # notifiers table :: This table keeps record of the notification agent settings c_db.execute( - 'CREATE TABLE IF NOT EXISTS notifiers (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'agent_id INTEGER, agent_name TEXT, agent_label TEXT, friendly_name TEXT, notifier_config TEXT, ' - 'on_play INTEGER DEFAULT 0, on_stop INTEGER DEFAULT 0, on_pause INTEGER DEFAULT 0, ' - 'on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, ' - 'on_error INTEGER DEFAULT 0, ' - 'on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, on_commercial INTEGER DEFAULT 0, ' - 'on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, ' - 'on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, ' - 'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, ' - 'on_concurrent INTEGER DEFAULT 0, on_newdevice INTEGER DEFAULT 0, on_plexpyupdate INTEGER DEFAULT 0, ' - 'on_plexpydbcorrupt INTEGER DEFAULT 0, ' - 'on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, ' - 'on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_error_subject TEXT, ' - 'on_intro_subject TEXT, on_credits_subject TEXT, on_commercial_subject TEXT,' - 'on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, ' - 'on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, ' - 'on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, ' - 'on_plexpydbcorrupt_subject TEXT, ' - 'on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, ' - 'on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, ' - 'on_intro_body TEXT, on_credits_body TEXT, on_commercial_body TEXT, ' - 'on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, ' - 'on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, ' - 'on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, ' - 'on_plexpydbcorrupt_body TEXT, ' - 'custom_conditions TEXT, custom_conditions_logic TEXT)' + "CREATE TABLE IF NOT EXISTS notifiers (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "agent_id INTEGER, agent_name TEXT, agent_label TEXT, friendly_name TEXT, notifier_config TEXT, " + "on_play INTEGER DEFAULT 0, on_stop INTEGER DEFAULT 0, on_pause INTEGER DEFAULT 0, " + "on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, " + "on_error INTEGER DEFAULT 0, " + "on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, on_commercial INTEGER DEFAULT 0, " + "on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, " + "on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, " + "on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, " + "on_concurrent INTEGER DEFAULT 0, on_newdevice INTEGER DEFAULT 0, on_plexpyupdate INTEGER DEFAULT 0, " + "on_plexpydbcorrupt INTEGER DEFAULT 0, " + "on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, " + "on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_error_subject TEXT, " + 
"on_intro_subject TEXT, on_credits_subject TEXT, on_commercial_subject TEXT," + "on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, " + "on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, " + "on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, " + "on_plexpydbcorrupt_subject TEXT, " + "on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, " + "on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, " + "on_intro_body TEXT, on_credits_body TEXT, on_commercial_body TEXT, " + "on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, " + "on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, " + "on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, " + "on_plexpydbcorrupt_body TEXT, " + "custom_conditions TEXT, custom_conditions_logic TEXT)" ) # notify_log table :: This is a table which logs notifications sent c_db.execute( - 'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, ' - 'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'user_id INTEGER, user TEXT, notifier_id INTEGER, agent_id INTEGER, agent_name TEXT, notify_action TEXT, ' - 'subject_text TEXT, body_text TEXT, script_args TEXT, success INTEGER DEFAULT 0, tag TEXT)' + "CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, " + "session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "user_id INTEGER, user TEXT, notifier_id INTEGER, agent_id INTEGER, agent_name TEXT, notify_action TEXT, " + "subject_text TEXT, body_text TEXT, script_args TEXT, success INTEGER DEFAULT 0, tag TEXT)" ) # newsletters table :: This table keeps record of the newsletter settings c_db.execute( - 'CREATE TABLE IF NOT EXISTS newsletters (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL, ' - 'friendly_name TEXT, newsletter_config TEXT, email_config TEXT, ' - 'subject TEXT, body TEXT, message TEXT, ' - 'cron TEXT NOT NULL DEFAULT \'0 0 * * 0\', active INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS newsletters (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL, " + "friendly_name TEXT, newsletter_config TEXT, email_config TEXT, " + "subject TEXT, body TEXT, message TEXT, " + "cron TEXT NOT NULL DEFAULT '0 0 * * 0', active INTEGER DEFAULT 0)" ) # newsletter_log table :: This is a table which logs newsletters sent c_db.execute( - 'CREATE TABLE IF NOT EXISTS newsletter_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, ' - 'newsletter_id INTEGER, agent_id INTEGER, agent_name TEXT, notify_action TEXT, ' - 'subject_text TEXT, body_text TEXT, message_text TEXT, start_date TEXT, end_date TEXT, ' - 'start_time INTEGER, end_time INTEGER, uuid TEXT UNIQUE, filename TEXT, email_msg_id TEXT, ' - 'success INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS newsletter_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, " + "newsletter_id INTEGER, agent_id INTEGER, agent_name TEXT, notify_action TEXT, " + "subject_text TEXT, body_text TEXT, message_text TEXT, start_date TEXT, end_date TEXT, " + "start_time INTEGER, end_time INTEGER, uuid TEXT UNIQUE, filename TEXT, email_msg_id TEXT, " + "success INTEGER DEFAULT 0)" ) # recently_added table 
:: This table keeps record of recently added items c_db.execute( - 'CREATE TABLE IF NOT EXISTS recently_added (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'added_at INTEGER, pms_identifier TEXT, section_id INTEGER, ' - 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, ' - 'media_info TEXT)' + "CREATE TABLE IF NOT EXISTS recently_added (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "added_at INTEGER, pms_identifier TEXT, section_id INTEGER, " + "rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, " + "media_info TEXT)" ) # mobile_devices table :: This table keeps record of devices linked with the mobile app c_db.execute( - 'CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, ' - 'platform TEXT, version TEXT, friendly_name TEXT, ' - 'onesignal_id TEXT, last_seen INTEGER, official INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, " + "platform TEXT, version TEXT, friendly_name TEXT, " + "onesignal_id TEXT, last_seen INTEGER, official INTEGER DEFAULT 0)" ) # tvmaze_lookup table :: This table keeps record of the TVmaze lookups c_db.execute( - 'CREATE TABLE IF NOT EXISTS tvmaze_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'rating_key INTEGER, thetvdb_id INTEGER, imdb_id TEXT, ' - 'tvmaze_id INTEGER, tvmaze_url TEXT, tvmaze_json TEXT)' + "CREATE TABLE IF NOT EXISTS tvmaze_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "rating_key INTEGER, thetvdb_id INTEGER, imdb_id TEXT, " + "tvmaze_id INTEGER, tvmaze_url TEXT, tvmaze_json TEXT)" ) # themoviedb_lookup table :: This table keeps record of the TheMovieDB lookups c_db.execute( - 'CREATE TABLE IF NOT EXISTS themoviedb_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'rating_key INTEGER, thetvdb_id INTEGER, imdb_id TEXT, ' - 'themoviedb_id INTEGER, themoviedb_url TEXT, themoviedb_json TEXT)' + "CREATE TABLE IF NOT EXISTS themoviedb_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "rating_key INTEGER, thetvdb_id INTEGER, imdb_id TEXT, " + "themoviedb_id INTEGER, themoviedb_url TEXT, themoviedb_json TEXT)" ) # musicbrainz_lookup table :: This table keeps record of the MusicBrainz lookups c_db.execute( - 'CREATE TABLE IF NOT EXISTS musicbrainz_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'rating_key INTEGER, musicbrainz_id INTEGER, musicbrainz_url TEXT, musicbrainz_type TEXT, ' - 'musicbrainz_json TEXT)' + "CREATE TABLE IF NOT EXISTS musicbrainz_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "rating_key INTEGER, musicbrainz_id INTEGER, musicbrainz_url TEXT, musicbrainz_type TEXT, " + "musicbrainz_json TEXT)" ) # image_hash_lookup table :: This table keeps record of the image hash lookups c_db.execute( - 'CREATE TABLE IF NOT EXISTS image_hash_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'img_hash TEXT UNIQUE, img TEXT, rating_key INTEGER, width INTEGER, height INTEGER, ' - 'opacity INTEGER, background TEXT, blur INTEGER, fallback TEXT)' + "CREATE TABLE IF NOT EXISTS image_hash_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "img_hash TEXT UNIQUE, img TEXT, rating_key INTEGER, width INTEGER, height INTEGER, " + "opacity INTEGER, background TEXT, blur INTEGER, fallback TEXT)" ) # imgur_lookup table :: This table keeps record of the Imgur uploads c_db.execute( - 'CREATE TABLE IF NOT EXISTS imgur_lookup (id INTEGER PRIMARY 
KEY AUTOINCREMENT, ' - 'img_hash TEXT, imgur_title TEXT, imgur_url TEXT, delete_hash TEXT)' + "CREATE TABLE IF NOT EXISTS imgur_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "img_hash TEXT, imgur_title TEXT, imgur_url TEXT, delete_hash TEXT)" ) # cloudinary_lookup table :: This table keeps record of the Cloudinary uploads c_db.execute( - 'CREATE TABLE IF NOT EXISTS cloudinary_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'img_hash TEXT, cloudinary_title TEXT, cloudinary_url TEXT)' + "CREATE TABLE IF NOT EXISTS cloudinary_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "img_hash TEXT, cloudinary_title TEXT, cloudinary_url TEXT)" ) # exports table :: This table keeps record of the exported files c_db.execute( - 'CREATE TABLE IF NOT EXISTS exports (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'timestamp INTEGER, section_id INTEGER, user_id INTEGER, rating_key INTEGER, media_type TEXT, ' - 'title TEXT, file_format TEXT, ' - 'metadata_level INTEGER, media_info_level INTEGER, ' - 'thumb_level INTEGER DEFAULT 0, art_level INTEGER DEFAULT 0, ' - 'custom_fields TEXT, individual_files INTEGER DEFAULT 0, ' - 'file_size INTEGER DEFAULT 0, complete INTEGER DEFAULT 0, ' - 'exported_items INTEGER DEFAULT 0, total_items INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS exports (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "timestamp INTEGER, section_id INTEGER, user_id INTEGER, rating_key INTEGER, media_type TEXT, " + "title TEXT, file_format TEXT, " + "metadata_level INTEGER, media_info_level INTEGER, " + "thumb_level INTEGER DEFAULT 0, art_level INTEGER DEFAULT 0, " + "custom_fields TEXT, individual_files INTEGER DEFAULT 0, " + "file_size INTEGER DEFAULT 0, complete INTEGER DEFAULT 0, " + "exported_items INTEGER DEFAULT 0, total_items INTEGER DEFAULT 0)" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT started FROM sessions') + c_db.execute("SELECT started FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN started INTEGER' + "ALTER TABLE sessions ADD COLUMN started INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN paused_counter INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN paused_counter INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN state TEXT' + "ALTER TABLE sessions ADD COLUMN state TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN user TEXT' + "ALTER TABLE sessions ADD COLUMN user TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN machine_id TEXT' + "ALTER TABLE sessions ADD COLUMN machine_id TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT title FROM sessions') + c_db.execute("SELECT title FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. 
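
Two conventions drive everything in this hunk. The quote flip lets SQL string literals be written with standard single quotes instead of backslash escapes (as in the newsletters cron default above); SQLite treats single quotes as string literals and double quotes as identifiers. And every "Upgrade ... from earlier versions" block, starting with the sessions probes here, is the same idiom: SELECT the column, and if sqlite3 raises OperationalError the schema predates it and the column is added. A minimal sketch of that idiom as a helper; the function name is hypothetical, the real code writes each block out longhand:

    import sqlite3

    def add_column_if_missing(c_db, table, column, definition):
        # Hypothetical helper showing the probe-and-alter idiom of the
        # upgrade blocks; identifiers cannot be bound as ? parameters,
        # so they are interpolated from trusted constants only.
        try:
            # Selecting a missing column raises sqlite3.OperationalError.
            c_db.execute("SELECT %s FROM %s" % (column, table))
        except sqlite3.OperationalError:
            c_db.execute("ALTER TABLE %s ADD COLUMN %s %s"
                         % (table, column, definition))

    # e.g. add_column_if_missing(c_db, "sessions", "started", "INTEGER")

The probe is cheap and idempotent, which is why the same blocks can run safely on every startup against any vintage of database.
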
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN title TEXT' + "ALTER TABLE sessions ADD COLUMN title TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN parent_title TEXT' + "ALTER TABLE sessions ADD COLUMN parent_title TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN grandparent_title TEXT' + "ALTER TABLE sessions ADD COLUMN grandparent_title TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN friendly_name TEXT' + "ALTER TABLE sessions ADD COLUMN friendly_name TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN player TEXT' + "ALTER TABLE sessions ADD COLUMN player TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN user_id INTEGER' + "ALTER TABLE sessions ADD COLUMN user_id INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT ip_address FROM sessions') + c_db.execute("SELECT ip_address FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN ip_address TEXT' + "ALTER TABLE sessions ADD COLUMN ip_address TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN platform TEXT' + "ALTER TABLE sessions ADD COLUMN platform TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN parent_rating_key INTEGER' + "ALTER TABLE sessions ADD COLUMN parent_rating_key INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN grandparent_rating_key INTEGER' + "ALTER TABLE sessions ADD COLUMN grandparent_rating_key INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN view_offset INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN view_offset INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN duration INTEGER' + "ALTER TABLE sessions ADD COLUMN duration INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_decision TEXT' + "ALTER TABLE sessions ADD COLUMN video_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_decision TEXT' + "ALTER TABLE sessions ADD COLUMN audio_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN width INTEGER' + "ALTER TABLE sessions ADD COLUMN width INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN height INTEGER' + "ALTER TABLE sessions ADD COLUMN height INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN container TEXT' + "ALTER TABLE sessions ADD COLUMN container TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_codec TEXT' + "ALTER TABLE sessions ADD COLUMN video_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_codec TEXT' + "ALTER TABLE sessions ADD COLUMN audio_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_resolution TEXT' + "ALTER TABLE sessions ADD COLUMN video_resolution TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_framerate TEXT' + "ALTER TABLE sessions ADD COLUMN video_framerate TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN aspect_ratio TEXT' + "ALTER TABLE sessions ADD COLUMN aspect_ratio TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_channels INTEGER' + "ALTER TABLE sessions ADD COLUMN audio_channels INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_protocol TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_protocol TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_container TEXT' + "ALTER TABLE sessions ADD COLUMN 
transcode_container TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_video_codec TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_video_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_audio_codec TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_audio_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_audio_channels INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_audio_channels INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_width INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_width INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_height INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_height INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT buffer_count FROM sessions') + c_db.execute("SELECT buffer_count FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER' + "ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT last_paused FROM sessions') + c_db.execute("SELECT last_paused FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN last_paused INTEGER' + "ALTER TABLE sessions ADD COLUMN last_paused INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT section_id FROM sessions') + c_db.execute("SELECT section_id FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN section_id INTEGER' + "ALTER TABLE sessions ADD COLUMN section_id INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT stopped FROM sessions') + c_db.execute("SELECT stopped FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stopped INTEGER' + "ALTER TABLE sessions ADD COLUMN stopped INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT transcode_key FROM sessions') + c_db.execute("SELECT transcode_key FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_key TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_key TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT write_attempts FROM sessions') + c_db.execute("SELECT write_attempts FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN write_attempts INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN write_attempts INTEGER DEFAULT 0" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT transcode_decision FROM sessions') + c_db.execute("SELECT transcode_decision FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_decision TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN full_title TEXT' + "ALTER TABLE sessions ADD COLUMN full_title TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN media_index INTEGER' + "ALTER TABLE sessions ADD COLUMN media_index INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN parent_media_index INTEGER' + "ALTER TABLE sessions ADD COLUMN parent_media_index INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN thumb TEXT' + "ALTER TABLE sessions ADD COLUMN thumb TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN parent_thumb TEXT' + "ALTER TABLE sessions ADD COLUMN parent_thumb TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN grandparent_thumb TEXT' + "ALTER TABLE sessions ADD COLUMN grandparent_thumb TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN year INTEGER' + "ALTER TABLE sessions ADD COLUMN year INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT raw_stream_info FROM sessions') + c_db.execute("SELECT raw_stream_info FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN product TEXT' + "ALTER TABLE sessions ADD COLUMN product TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN optimized_version INTEGER' + "ALTER TABLE sessions ADD COLUMN optimized_version INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN optimized_version_profile TEXT' + "ALTER TABLE sessions ADD COLUMN optimized_version_profile TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN synced_version INTEGER' + "ALTER TABLE sessions ADD COLUMN synced_version INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN video_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_width INTEGER' + "ALTER TABLE sessions ADD COLUMN video_width INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_height INTEGER' + "ALTER TABLE sessions ADD COLUMN video_height INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN audio_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitle_codec TEXT' + "ALTER TABLE sessions ADD COLUMN subtitle_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_resolution TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_resolution TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN quality_profile TEXT' + "ALTER TABLE sessions ADD COLUMN quality_profile TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_container_decision TEXT' + "ALTER TABLE sessions ADD COLUMN stream_container_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_container TEXT' + "ALTER TABLE sessions ADD COLUMN stream_container TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_decision TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_codec TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_bitrate INTEGER' + "ALTER 
TABLE sessions ADD COLUMN stream_video_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_width INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_video_width INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_height INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_video_height INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_framerate TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_framerate TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_decision TEXT' + "ALTER TABLE sessions ADD COLUMN stream_audio_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_codec TEXT' + "ALTER TABLE sessions ADD COLUMN stream_audio_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_audio_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_channels INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_audio_channels INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitles INTEGER' + "ALTER TABLE sessions ADD COLUMN subtitles INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_subtitle_decision TEXT' + "ALTER TABLE sessions ADD COLUMN stream_subtitle_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_subtitle_codec TEXT' + "ALTER TABLE sessions ADD COLUMN stream_subtitle_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN raw_stream_info TEXT' + "ALTER TABLE sessions ADD COLUMN raw_stream_info TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT video_height FROM sessions') + c_db.execute("SELECT video_height FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_height INTEGER' + "ALTER TABLE sessions ADD COLUMN video_height INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT subtitles FROM sessions') + c_db.execute("SELECT subtitles FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitles INTEGER' + "ALTER TABLE sessions ADD COLUMN subtitles INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT synced_version_profile FROM sessions') + c_db.execute("SELECT synced_version_profile FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN synced_version_profile TEXT' + "ALTER TABLE sessions ADD COLUMN synced_version_profile TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN optimized_version_title TEXT' + "ALTER TABLE sessions ADD COLUMN optimized_version_title TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT transcode_hw_decoding FROM sessions') + c_db.execute("SELECT transcode_hw_decoding FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_hw_decoding INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_hw_decoding INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_hw_encoding INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_hw_encoding INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT watched FROM sessions') + c_db.execute("SELECT watched FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN watched INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN watched INTEGER DEFAULT 0" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT live FROM sessions') + c_db.execute("SELECT live FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN live INTEGER' + "ALTER TABLE sessions ADD COLUMN live INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN live_uuid TEXT' + "ALTER TABLE sessions ADD COLUMN live_uuid TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT session_id FROM sessions') + c_db.execute("SELECT session_id FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN session_id TEXT' + "ALTER TABLE sessions ADD COLUMN session_id TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT original_title FROM sessions') + c_db.execute("SELECT original_title FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN original_title TEXT' + "ALTER TABLE sessions ADD COLUMN original_title TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT secure FROM sessions') + c_db.execute("SELECT secure FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN secure INTEGER' + "ALTER TABLE sessions ADD COLUMN secure INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN relayed INTEGER' + "ALTER TABLE sessions ADD COLUMN relayed INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT rating_key_websocket FROM sessions') + c_db.execute("SELECT rating_key_websocket FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN rating_key_websocket TEXT' + "ALTER TABLE sessions ADD COLUMN rating_key_websocket TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT video_scan_type FROM sessions') + c_db.execute("SELECT video_scan_type FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_scan_type TEXT' + "ALTER TABLE sessions ADD COLUMN video_scan_type TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_full_resolution TEXT' + "ALTER TABLE sessions ADD COLUMN video_full_resolution TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_scan_type TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_scan_type TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_full_resolution TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_full_resolution TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT video_dynamic_range FROM sessions') + c_db.execute("SELECT video_dynamic_range FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_dynamic_range TEXT' + "ALTER TABLE sessions ADD COLUMN video_dynamic_range TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_dynamic_range TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_dynamic_range TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT channel_identifier FROM sessions') + c_db.execute("SELECT channel_identifier FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN channel_call_sign TEXT' + "ALTER TABLE sessions ADD COLUMN channel_call_sign TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN channel_identifier TEXT' + "ALTER TABLE sessions ADD COLUMN channel_identifier TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN channel_thumb TEXT' + "ALTER TABLE sessions ADD COLUMN channel_thumb TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT originally_available_at FROM sessions') + c_db.execute("SELECT originally_available_at FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN originally_available_at TEXT' + "ALTER TABLE sessions ADD COLUMN originally_available_at TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN added_at INTEGER' + "ALTER TABLE sessions ADD COLUMN added_at INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT guid FROM sessions') + c_db.execute("SELECT guid FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN guid TEXT' + "ALTER TABLE sessions ADD COLUMN guid TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT bandwidth FROM sessions') + c_db.execute("SELECT bandwidth FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN bandwidth INTEGER' + "ALTER TABLE sessions ADD COLUMN bandwidth INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN location TEXT' + "ALTER TABLE sessions ADD COLUMN location TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT initial_stream FROM sessions') + c_db.execute("SELECT initial_stream FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN initial_stream INTEGER DEFAULT 1' + "ALTER TABLE sessions ADD COLUMN initial_stream INTEGER DEFAULT 1" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT audio_language FROM sessions') + c_db.execute("SELECT audio_language FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_language TEXT' + "ALTER TABLE sessions ADD COLUMN audio_language TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_language_code TEXT' + "ALTER TABLE sessions ADD COLUMN audio_language_code TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_language TEXT' + "ALTER TABLE sessions ADD COLUMN stream_audio_language TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_language_code TEXT' + "ALTER TABLE sessions ADD COLUMN stream_audio_language_code TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT subtitle_language FROM sessions') + c_db.execute("SELECT subtitle_language FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitle_language TEXT' + "ALTER TABLE sessions ADD COLUMN subtitle_language TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_subtitle_language TEXT' + "ALTER TABLE sessions ADD COLUMN stream_subtitle_language TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT subtitle_forced FROM sessions') + c_db.execute("SELECT subtitle_forced FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitle_forced INTEGER' + "ALTER TABLE sessions ADD COLUMN subtitle_forced INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_subtitle_forced INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_subtitle_forced INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT intro FROM sessions') + c_db.execute("SELECT intro FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN intro INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN intro INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT commercial FROM sessions') + c_db.execute("SELECT commercial FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN commercial INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN commercial INTEGER DEFAULT 0" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT marker FROM sessions') + c_db.execute("SELECT marker FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN marker INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN marker INTEGER DEFAULT 0" ) # Upgrade session_history table from earlier versions try: - c_db.execute('SELECT reference_id FROM session_history') + c_db.execute("SELECT reference_id FROM session_history") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history.") c_db.execute( - 'ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0' + "ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0" ) # Set reference_id to the first row where (user_id = previous row, rating_key != previous row) and user_id = user_id c_db.execute( - 'UPDATE session_history ' \ - 'SET reference_id = (SELECT (CASE \ + "UPDATE session_history " \ + "SET reference_id = (SELECT (CASE \ WHEN (SELECT MIN(id) FROM session_history WHERE id > ( \ SELECT MAX(id) FROM session_history \ WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) IS NULL \ THEN (SELECT MIN(id) FROM session_history WHERE (user_id = t1.user_id)) \ ELSE (SELECT MIN(id) FROM session_history WHERE id > ( \ SELECT MAX(id) FROM session_history \ - WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) ' \ - 'FROM session_history AS t1 ' \ - 'WHERE t1.id = session_history.id) ' + WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) " \ + "FROM session_history AS t1 " \ + "WHERE t1.id = session_history.id) " ) # Upgrade session_history table from earlier versions try: - c_db.execute('SELECT bandwidth FROM session_history') + c_db.execute("SELECT bandwidth FROM session_history") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history.") c_db.execute( - 'ALTER TABLE session_history ADD COLUMN platform_version TEXT' + "ALTER TABLE session_history ADD COLUMN platform_version TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN product TEXT' + "ALTER TABLE session_history ADD COLUMN product TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN product_version TEXT' + "ALTER TABLE session_history ADD COLUMN product_version TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN profile TEXT' + "ALTER TABLE session_history ADD COLUMN profile TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN bandwidth INTEGER' + "ALTER TABLE session_history ADD COLUMN bandwidth INTEGER" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN location TEXT' + "ALTER TABLE session_history ADD COLUMN location TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN quality_profile TEXT' + "ALTER TABLE session_history ADD COLUMN quality_profile TEXT" ) # Upgrade session_history table from earlier versions try: - c_db.execute('SELECT secure FROM session_history') + c_db.execute("SELECT secure FROM session_history") except sqlite3.OperationalError: logger.debug("Altering database. 
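
The reference_id backfill at the top of this hunk is the one non-trivial statement: for each history row it finds the id of the first row in the current consecutive run of plays of the same rating_key by the same user, so resumed plays can later be grouped into one logical entry. The correlated subquery reads more naturally as a single ordered pass; a Python sketch of the same rule, assuming rows sorted by id:

    def backfill_reference_ids(rows):
        # rows: dicts with "id", "user_id", "rating_key", ordered by id.
        # Sketch of the correlated-subquery backfill above: every row
        # points at the first row of its user's current same-item run.
        run_start = {}   # user_id -> id of the first row in the current run
        run_key = {}     # user_id -> rating_key of the current run
        for row in rows:
            user, key = row["user_id"], row["rating_key"]
            if run_key.get(user) != key:
                run_start[user] = row["id"]   # a new run begins here
                run_key[user] = key
            row["reference_id"] = run_start[user]
        return rows
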
Updating database table session_history.") c_db.execute( - 'ALTER TABLE session_history ADD COLUMN secure INTEGER' + "ALTER TABLE session_history ADD COLUMN secure INTEGER" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN relayed INTEGER' + "ALTER TABLE session_history ADD COLUMN relayed INTEGER" ) # Upgrade session_history table from earlier versions try: - result = c_db.execute('SELECT platform FROM session_history ' - 'WHERE platform = "windows"').fetchall() + result = c_db.execute("SELECT platform FROM session_history " + "WHERE platform = 'windows'").fetchall() if len(result) > 0: logger.debug("Altering database. Capitalizing Windows platform values in session_history table.") c_db.execute( - 'UPDATE session_history SET platform = "Windows" WHERE platform = "windows" ' + "UPDATE session_history SET platform = 'Windows' WHERE platform = 'windows' " ) except sqlite3.OperationalError: logger.warn("Unable to capitalize Windows platform values in session_history table.") # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT full_title FROM session_history_metadata') + c_db.execute("SELECT full_title FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN full_title TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN full_title TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT tagline FROM session_history_metadata') + c_db.execute("SELECT tagline FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT labels FROM session_history_metadata') + c_db.execute("SELECT labels FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN labels TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN labels TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT original_title FROM session_history_metadata') + c_db.execute("SELECT original_title FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN original_title TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN original_title TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT live FROM session_history_metadata') + c_db.execute("SELECT live FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. 
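
The Windows-platform fix above differs from the schema probes: it guards on the data, querying for leftover lowercase "windows" rows and only logging and rewriting when any exist, so the upgrade stays idempotent and quiet on already-migrated databases. The same shape with bound parameters, as an illustrative sketch (not the literal code):

    def capitalize_platform(c_db, old="windows", new="Windows"):
        # Guarded data fix: probe for legacy values first so that
        # re-running the upgrade is a silent no-op.
        result = c_db.execute("SELECT platform FROM session_history "
                              "WHERE platform = ?", (old,)).fetchall()
        if result:
            c_db.execute("UPDATE session_history SET platform = ? "
                         "WHERE platform = ?", (new, old))
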
Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN live INTEGER DEFAULT 0' + "ALTER TABLE session_history_metadata ADD COLUMN live INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN channel_call_sign TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN channel_call_sign TEXT" ) c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN channel_identifier TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN channel_identifier TEXT" ) c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN channel_thumb TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN channel_thumb TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT marker_credits_first FROM session_history_metadata') + c_db.execute("SELECT marker_credits_first FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_first INTEGER DEFAULT NULL' + "ALTER TABLE session_history_metadata ADD COLUMN marker_credits_first INTEGER DEFAULT NULL" ) c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_final INTEGER DEFAULT NULL' + "ALTER TABLE session_history_metadata ADD COLUMN marker_credits_final INTEGER DEFAULT NULL" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT transcode_decision FROM session_history_media_info') + c_db.execute("SELECT transcode_decision FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_decision TEXT" ) c_db.execute( - 'UPDATE session_history_media_info SET transcode_decision = (CASE ' - 'WHEN video_decision = "transcode" OR audio_decision = "transcode" THEN "transcode" ' - 'WHEN video_decision = "copy" OR audio_decision = "copy" THEN "copy" ' - 'WHEN video_decision = "direct play" OR audio_decision = "direct play" THEN "direct play" END)' + "UPDATE session_history_media_info SET transcode_decision = (CASE " + "WHEN video_decision = 'transcode' OR audio_decision = 'transcode' THEN 'transcode' " + "WHEN video_decision = 'copy' OR audio_decision = 'copy' THEN 'copy' " + "WHEN video_decision = 'direct play' OR audio_decision = 'direct play' THEN 'direct play' END)" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT subtitles FROM session_history_media_info') + c_db.execute("SELECT subtitles FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. 
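
The transcode_decision backfill above collapses the per-stream columns into one session-level value with a fixed precedence: a transcode on either stream makes the whole session a transcode, otherwise a copy wins, otherwise direct play. The CASE expression mirrors this Python, shown as a sketch:

    def overall_decision(video_decision, audio_decision):
        # Same precedence as the CASE backfill above:
        # transcode > copy > direct play.
        for decision in ("transcode", "copy", "direct play"):
            if decision in (video_decision, audio_decision):
                return decision
        return None  # the CASE likewise yields NULL for anything else
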
Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_bit_depth INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN video_bit_depth INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN video_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_codec_level TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN video_codec_level TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_width INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN video_width INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_height INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN video_height INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN audio_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN audio_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_requested INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_requested INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_full_pipeline INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_full_pipeline INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decode TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decode TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encode TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encode TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decode_title TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decode_title TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encode_title TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encode_title TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_container TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_container TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_container_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_container_decision TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_decision TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_codec TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_codec TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_codec_level TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_codec_level TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_bit_depth INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN 
stream_video_bit_depth INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_height INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_height INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_width INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_width INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_resolution TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_resolution TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_framerate TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_framerate TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_decision TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_codec TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_codec TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_channels INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_channels INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_decision TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_codec TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_codec TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_container TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_container TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_forced INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_forced INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN subtitles INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN subtitles INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN synced_version INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN synced_version INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN optimized_version INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN optimized_version INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN optimized_version_profile TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN optimized_version_profile TEXT" ) c_db.execute( - 'UPDATE session_history_media_info SET video_resolution=REPLACE(video_resolution, "p", "")' + "UPDATE session_history_media_info SET video_resolution=REPLACE(video_resolution, 'p', '')" ) c_db.execute( - 'UPDATE session_history_media_info SET video_resolution=REPLACE(video_resolution, "SD", "sd")' + "UPDATE session_history_media_info SET video_resolution=REPLACE(video_resolution, 'SD', 'sd')" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT subtitle_codec FROM session_history_media_info') + c_db.execute("SELECT subtitle_codec FROM session_history_media_info") except 
sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN subtitle_codec TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN subtitle_codec TEXT" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT synced_version_profile FROM session_history_media_info') + c_db.execute("SELECT synced_version_profile FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN synced_version_profile TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN synced_version_profile TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN optimized_version_title TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN optimized_version_title TEXT" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT transcode_hw_decoding FROM session_history_media_info') + c_db.execute("SELECT transcode_hw_decoding FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decoding INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decoding INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encoding INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encoding INTEGER" ) c_db.execute( - 'UPDATE session_history_media_info SET subtitle_codec = "" WHERE subtitle_codec IS NULL' + "UPDATE session_history_media_info SET subtitle_codec = '' WHERE subtitle_codec IS NULL" ) # Upgrade session_history_media_info table from earlier versions try: - result = c_db.execute('SELECT stream_container FROM session_history_media_info ' - 'WHERE stream_container IS NULL').fetchall() + result = c_db.execute("SELECT stream_container FROM session_history_media_info " + "WHERE stream_container IS NULL").fetchall() if len(result) > 0: logger.debug("Altering database. 
Removing NULL values from session_history_media_info table.") c_db.execute( - 'UPDATE session_history_media_info SET stream_container = "" WHERE stream_container IS NULL' + "UPDATE session_history_media_info SET stream_container = '' WHERE stream_container IS NULL" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_video_codec = "" WHERE stream_video_codec IS NULL' + "UPDATE session_history_media_info SET stream_video_codec = '' WHERE stream_video_codec IS NULL" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_audio_codec = "" WHERE stream_audio_codec IS NULL' + "UPDATE session_history_media_info SET stream_audio_codec = '' WHERE stream_audio_codec IS NULL" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_subtitle_codec = "" WHERE stream_subtitle_codec IS NULL' + "UPDATE session_history_media_info SET stream_subtitle_codec = '' WHERE stream_subtitle_codec IS NULL" ) except sqlite3.OperationalError: logger.warn("Unable to remove NULL values from session_history_media_info table.") # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT video_scan_type FROM session_history_media_info') + c_db.execute("SELECT video_scan_type FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_scan_type TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN video_scan_type TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_full_resolution TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN video_full_resolution TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_scan_type TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_scan_type TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_full_resolution TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_full_resolution TEXT" ) c_db.execute( - 'UPDATE session_history_media_info SET video_scan_type = "progressive" ' - 'WHERE video_resolution != ""' + "UPDATE session_history_media_info SET video_scan_type = 'progressive' " + "WHERE video_resolution != ''" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_video_scan_type = "progressive" ' - 'WHERE stream_video_resolution != "" AND stream_video_resolution IS NOT NULL' + "UPDATE session_history_media_info SET stream_video_scan_type = 'progressive' " + "WHERE stream_video_resolution != '' AND stream_video_resolution IS NOT NULL" ) c_db.execute( - 'UPDATE session_history_media_info SET video_full_resolution = (CASE ' - 'WHEN video_resolution = "" OR video_resolution = "SD" OR video_resolution = "4k" THEN video_resolution ' - 'WHEN video_resolution = "sd" THEN "SD" ' - 'ELSE video_resolution || "p" END)' + "UPDATE session_history_media_info SET video_full_resolution = (CASE " + "WHEN video_resolution = '' OR video_resolution = 'SD' OR video_resolution = '4k' THEN video_resolution " + "WHEN video_resolution = 'sd' THEN 'SD' " + "ELSE video_resolution || 'p' END)" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_video_full_resolution = ( ' - 'CASE WHEN stream_video_resolution = "" OR stream_video_resolution = "SD" OR stream_video_resolution = "4k" ' - 'THEN stream_video_resolution ' - 'WHEN stream_video_resolution = "sd" THEN "SD" ' - 'ELSE stream_video_resolution || "p" END)' + "UPDATE 
session_history_media_info SET stream_video_full_resolution = ( " + "CASE WHEN stream_video_resolution = '' OR stream_video_resolution = 'SD' OR stream_video_resolution = '4k' " + "THEN stream_video_resolution " + "WHEN stream_video_resolution = 'sd' THEN 'SD' " + "ELSE stream_video_resolution || 'p' END)" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT video_dynamic_range FROM session_history_media_info') + c_db.execute("SELECT video_dynamic_range FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_dynamic_range TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN video_dynamic_range TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_dynamic_range TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_dynamic_range TEXT" ) - result = c_db.execute('SELECT * FROM session_history_media_info ' - 'WHERE video_dynamic_range = "SDR" AND stream_video_dynamic_range = "HDR"').fetchone() + result = c_db.execute("SELECT * FROM session_history_media_info " + "WHERE video_dynamic_range = 'SDR' AND stream_video_dynamic_range = 'HDR'").fetchone() if result: c_db.execute( - 'UPDATE session_history_media_info SET stream_video_dynamic_range = "SDR" ' - 'WHERE video_dynamic_range = "SDR" AND stream_video_dynamic_range = "HDR"' + "UPDATE session_history_media_info SET stream_video_dynamic_range = 'SDR' " + "WHERE video_dynamic_range = 'SDR' AND stream_video_dynamic_range = 'HDR'" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT audio_language FROM session_history_media_info') + c_db.execute("SELECT audio_language FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN audio_language TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN audio_language TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN audio_language_code TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN audio_language_code TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language_code TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language_code TEXT" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT subtitle_language FROM session_history_media_info') + c_db.execute("SELECT subtitle_language FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. 
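
The video_full_resolution backfill earlier in this hunk rebuilds display values from the bare resolutions normalized just before it ("p" stripped, "SD" lowercased): empty strings, "SD", and "4k" pass through unchanged, "sd" is re-capitalized, and any numeric value gets its "p" suffix back. As a plain function (a sketch of the CASE expression, which is applied identically to the stream_ columns):

    def full_resolution(video_resolution):
        # Mirrors the video_full_resolution CASE backfill above.
        if video_resolution in ("", "SD", "4k"):
            return video_resolution
        if video_resolution == "sd":
            return "SD"
        return video_resolution + "p"   # e.g. "1080" -> "1080p"
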
Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN subtitle_language TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN subtitle_language TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_language TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_language TEXT" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT subtitle_forced FROM session_history_media_info') + c_db.execute("SELECT subtitle_forced FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN subtitle_forced INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN subtitle_forced INTEGER" ) # Upgrade session_history table from earlier versions try: - c_db.execute('SELECT section_id FROM session_history') + c_db.execute("SELECT section_id FROM session_history") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history.") c_db.execute( - 'ALTER TABLE session_history ADD COLUMN section_id INTEGER' + "ALTER TABLE session_history ADD COLUMN section_id INTEGER" ) c_db.execute( - 'UPDATE session_history SET section_id = (' - 'SELECT section_id FROM session_history_metadata ' - 'WHERE session_history_metadata.id = session_history.id)' + "UPDATE session_history SET section_id = (" + "SELECT section_id FROM session_history_metadata " + "WHERE session_history_metadata.id = session_history.id)" ) c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history_metadata_temp (id INTEGER PRIMARY KEY, ' - 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, ' - 'media_index INTEGER, parent_media_index INTEGER, ' - 'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, ' - 'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, ' - 'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, ' - 'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, ' - 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)' + "CREATE TABLE IF NOT EXISTS session_history_metadata_temp (id INTEGER PRIMARY KEY, " + "rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, " + "media_index INTEGER, parent_media_index INTEGER, " + "thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, " + "art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, " + "last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, " + "duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, " + "labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)" ) c_db.execute( - 'INSERT INTO session_history_metadata_temp (id, rating_key, parent_rating_key, grandparent_rating_key, ' - 'title, parent_title, grandparent_title, original_title, full_title, ' - 'media_index, parent_media_index, ' - 'thumb, 
parent_thumb, grandparent_thumb, ' - 'art, media_type, year, originally_available_at, added_at, updated_at, ' - 'last_viewed_at, content_rating, summary, tagline, rating, ' - 'duration, guid, directors, writers, actors, genres, studio, ' - 'labels, live, channel_call_sign, channel_identifier, channel_thumb) ' - 'SELECT id, rating_key, parent_rating_key, grandparent_rating_key, ' - 'title, parent_title, grandparent_title, original_title, full_title, ' - 'media_index, parent_media_index, ' - 'thumb, parent_thumb, grandparent_thumb, ' - 'art, media_type, year, originally_available_at, added_at, updated_at, ' - 'last_viewed_at, content_rating, summary, tagline, rating, ' - 'duration, guid, directors, writers, actors, genres, studio, ' - 'labels, live, channel_call_sign, channel_identifier, channel_thumb ' - 'FROM session_history_metadata' + "INSERT INTO session_history_metadata_temp (id, rating_key, parent_rating_key, grandparent_rating_key, " + "title, parent_title, grandparent_title, original_title, full_title, " + "media_index, parent_media_index, " + "thumb, parent_thumb, grandparent_thumb, " + "art, media_type, year, originally_available_at, added_at, updated_at, " + "last_viewed_at, content_rating, summary, tagline, rating, " + "duration, guid, directors, writers, actors, genres, studio, " + "labels, live, channel_call_sign, channel_identifier, channel_thumb) " + "SELECT id, rating_key, parent_rating_key, grandparent_rating_key, " + "title, parent_title, grandparent_title, original_title, full_title, " + "media_index, parent_media_index, " + "thumb, parent_thumb, grandparent_thumb, " + "art, media_type, year, originally_available_at, added_at, updated_at, " + "last_viewed_at, content_rating, summary, tagline, rating, " + "duration, guid, directors, writers, actors, genres, studio, " + "labels, live, channel_call_sign, channel_identifier, channel_thumb " + "FROM session_history_metadata" ) c_db.execute( - 'DROP TABLE session_history_metadata' + "DROP TABLE session_history_metadata" ) c_db.execute( - 'ALTER TABLE session_history_metadata_temp RENAME TO session_history_metadata' + "ALTER TABLE session_history_metadata_temp RENAME TO session_history_metadata" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT do_notify FROM users') + c_db.execute("SELECT do_notify FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN do_notify INTEGER DEFAULT 1' + "ALTER TABLE users ADD COLUMN do_notify INTEGER DEFAULT 1" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT keep_history FROM users') + c_db.execute("SELECT keep_history FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1' + "ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT custom_avatar_url FROM users') + c_db.execute("SELECT custom_avatar_url FROM users") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN custom_avatar_url TEXT' + "ALTER TABLE users ADD COLUMN custom_avatar_url TEXT" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT deleted_user FROM users') + c_db.execute("SELECT deleted_user FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0' + "ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT allow_guest FROM users') + c_db.execute("SELECT allow_guest FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN allow_guest INTEGER DEFAULT 0' + "ALTER TABLE users ADD COLUMN allow_guest INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN user_token TEXT' + "ALTER TABLE users ADD COLUMN user_token TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN server_token TEXT' + "ALTER TABLE users ADD COLUMN server_token TEXT" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT shared_libraries FROM users') + c_db.execute("SELECT shared_libraries FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN shared_libraries TEXT' + "ALTER TABLE users ADD COLUMN shared_libraries TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_all TEXT' + "ALTER TABLE users ADD COLUMN filter_all TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_movies TEXT' + "ALTER TABLE users ADD COLUMN filter_movies TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_tv TEXT' + "ALTER TABLE users ADD COLUMN filter_tv TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_music TEXT' + "ALTER TABLE users ADD COLUMN filter_music TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_photos TEXT' + "ALTER TABLE users ADD COLUMN filter_photos TEXT" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT is_admin FROM users') + c_db.execute("SELECT is_admin FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN is_admin INTEGER DEFAULT 0' + "ALTER TABLE users ADD COLUMN is_admin INTEGER DEFAULT 0" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT is_active FROM users') + c_db.execute("SELECT is_active FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN is_active INTEGER DEFAULT 1' + "ALTER TABLE users ADD COLUMN is_active INTEGER DEFAULT 1" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT title FROM users') + c_db.execute("SELECT title FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN title TEXT' + "ALTER TABLE users ADD COLUMN title TEXT" ) try: - result = c_db.execute('SELECT * FROM users WHERE friendly_name = username').fetchall() + result = c_db.execute("SELECT * FROM users WHERE friendly_name = username").fetchall() if result: logger.debug("Altering database. 
try: - result = c_db.execute('SELECT * FROM users WHERE friendly_name = username').fetchall() + result = c_db.execute("SELECT * FROM users WHERE friendly_name = username").fetchall() if result: logger.debug("Altering database. Resetting user friendly names equal to username.") - c_db.execute('UPDATE users SET friendly_name = NULL WHERE friendly_name = username') + c_db.execute("UPDATE users SET friendly_name = NULL WHERE friendly_name = username") except sqlite3.OperationalError: pass # Upgrade notify_log table from earlier versions try: - c_db.execute('SELECT poster_url FROM notify_log') + c_db.execute("SELECT poster_url FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'ALTER TABLE notify_log ADD COLUMN poster_url TEXT' + "ALTER TABLE notify_log ADD COLUMN poster_url TEXT" ) # Upgrade notify_log table from earlier versions (populate table with data from notify_log) try: - c_db.execute('SELECT timestamp FROM notify_log') + c_db.execute("SELECT timestamp FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'CREATE TABLE IF NOT EXISTS notify_log_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, ' - 'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'user_id INTEGER, user TEXT, agent_id INTEGER, agent_name TEXT, notify_action TEXT, ' - 'subject_text TEXT, body_text TEXT, script_args TEXT, poster_url TEXT)' + "CREATE TABLE IF NOT EXISTS notify_log_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, " + "session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "user_id INTEGER, user TEXT, agent_id INTEGER, agent_name TEXT, notify_action TEXT, " + "subject_text TEXT, body_text TEXT, script_args TEXT, poster_url TEXT)" ) c_db.execute( - 'INSERT INTO notify_log_temp (session_key, rating_key, user_id, user, agent_id, agent_name, ' - 'poster_url, timestamp, notify_action) ' - 'SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, timestamp, ' - 'notify_action FROM notify_log_temp ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_play, "play" FROM notify_log WHERE on_play ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_stop, "stop" FROM notify_log WHERE on_stop ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_watched, "watched" FROM notify_log WHERE on_watched ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_pause, "pause" FROM notify_log WHERE on_pause ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_resume, "resume" FROM notify_log WHERE on_resume ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_buffer, "buffer" FROM notify_log WHERE on_buffer ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_created, "created" FROM notify_log WHERE on_created ' - 'ORDER BY timestamp ') + "INSERT INTO notify_log_temp (session_key, rating_key, user_id, user, agent_id, agent_name, " + "poster_url, timestamp, notify_action) " + "SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, timestamp, " + "notify_action FROM notify_log_temp " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_play, 'play' FROM notify_log WHERE on_play " + "UNION ALL SELECT session_key,
rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_stop, 'stop' FROM notify_log WHERE on_stop " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_watched, 'watched' FROM notify_log WHERE on_watched " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_pause, 'pause' FROM notify_log WHERE on_pause " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_resume, 'resume' FROM notify_log WHERE on_resume " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_buffer, 'buffer' FROM notify_log WHERE on_buffer " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_created, 'created' FROM notify_log WHERE on_created " + "ORDER BY timestamp ") c_db.execute( - 'DROP TABLE notify_log' + "DROP TABLE notify_log" ) c_db.execute( - 'ALTER TABLE notify_log_temp RENAME TO notify_log' + "ALTER TABLE notify_log_temp RENAME TO notify_log" ) # Upgrade notify_log table from earlier versions try: - c_db.execute('SELECT notifier_id FROM notify_log') + c_db.execute("SELECT notifier_id FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'ALTER TABLE notify_log ADD COLUMN notifier_id INTEGER' + "ALTER TABLE notify_log ADD COLUMN notifier_id INTEGER" ) # Upgrade notify_log table from earlier versions try: - c_db.execute('SELECT success FROM notify_log') + c_db.execute("SELECT success FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'ALTER TABLE notify_log ADD COLUMN success INTEGER DEFAULT 0' + "ALTER TABLE notify_log ADD COLUMN success INTEGER DEFAULT 0" ) c_db.execute( - 'UPDATE notify_log SET success = 1' + "UPDATE notify_log SET success = 1" ) # Upgrade notify_log table from earlier versions try: - c_db.execute('SELECT tag FROM notify_log') + c_db.execute("SELECT tag FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'ALTER TABLE notify_log ADD COLUMN tag TEXT' + "ALTER TABLE notify_log ADD COLUMN tag TEXT" ) # Upgrade newsletter_log table from earlier versions try: - c_db.execute('SELECT start_time FROM newsletter_log') + c_db.execute("SELECT start_time FROM newsletter_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table newsletter_log.") c_db.execute( - 'ALTER TABLE newsletter_log ADD COLUMN start_time INTEGER' + "ALTER TABLE newsletter_log ADD COLUMN start_time INTEGER" ) c_db.execute( - 'ALTER TABLE newsletter_log ADD COLUMN end_time INTEGER' + "ALTER TABLE newsletter_log ADD COLUMN end_time INTEGER" ) # Upgrade newsletter_log table from earlier versions try: - c_db.execute('SELECT filename FROM newsletter_log') + c_db.execute("SELECT filename FROM newsletter_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table newsletter_log.") c_db.execute( - 'ALTER TABLE newsletter_log ADD COLUMN filename TEXT' + "ALTER TABLE newsletter_log ADD COLUMN filename TEXT" ) # Upgrade newsletter_log table from earlier versions try: - c_db.execute('SELECT email_msg_id FROM newsletter_log') + c_db.execute("SELECT email_msg_id FROM newsletter_log") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table newsletter_log.") c_db.execute( - 'ALTER TABLE newsletter_log ADD COLUMN email_msg_id TEXT' + "ALTER TABLE newsletter_log ADD COLUMN email_msg_id TEXT" ) # Upgrade newsletters table from earlier versions try: - c_db.execute('SELECT id_name FROM newsletters') + c_db.execute("SELECT id_name FROM newsletters") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table newsletters.") c_db.execute( - 'ALTER TABLE newsletters ADD COLUMN id_name TEXT NOT NULL' + "ALTER TABLE newsletters ADD COLUMN id_name TEXT NOT NULL" ) # Upgrade newsletters table from earlier versions try: - result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="newsletters"').fetchone() - if 'TEXT NOT NULL DEFAULT "0 0 * * 0"' in result[0]: + result = c_db.execute("SELECT SQL FROM sqlite_master WHERE type='table' AND name='newsletters'").fetchone() + if "TEXT NOT NULL DEFAULT \"0 0 * * 0\"" in result[0]: logger.debug("Altering database. Updating default cron value in newsletters table.") c_db.execute( - 'CREATE TABLE newsletters_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL, ' - 'friendly_name TEXT, newsletter_config TEXT, email_config TEXT, ' - 'subject TEXT, body TEXT, message TEXT, ' - 'cron TEXT NOT NULL DEFAULT \'0 0 * * 0\', active INTEGER DEFAULT 0)' + "CREATE TABLE newsletters_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL, " + "friendly_name TEXT, newsletter_config TEXT, email_config TEXT, " + "subject TEXT, body TEXT, message TEXT, " + "cron TEXT NOT NULL DEFAULT '0 0 * * 0', active INTEGER DEFAULT 0)" ) c_db.execute( - 'INSERT INTO newsletters_temp (id, agent_id, agent_name, agent_label, id_name, ' - 'friendly_name, newsletter_config, email_config, subject, body, message, cron, active) ' - 'SELECT id, agent_id, agent_name, agent_label, id_name, ' - 'friendly_name, newsletter_config, email_config, subject, body, message, cron, active ' - 'FROM newsletters' + "INSERT INTO newsletters_temp (id, agent_id, agent_name, agent_label, id_name, " + "friendly_name, newsletter_config, email_config, subject, body, message, cron, active) " + "SELECT id, agent_id, agent_name, agent_label, id_name, " + "friendly_name, newsletter_config, email_config, subject, body, message, cron, active " + "FROM newsletters" ) c_db.execute( - 'DROP TABLE newsletters' + "DROP TABLE newsletters" ) c_db.execute( - 'ALTER TABLE newsletters_temp RENAME TO newsletters' + "ALTER TABLE newsletters_temp RENAME TO newsletters" ) except sqlite3.OperationalError: logger.warn("Unable to update default cron value in newsletters table.") try: c_db.execute( - 'DROP TABLE newsletters_temp' + "DROP TABLE newsletters_temp" ) except: pass # Upgrade library_sections table from earlier versions (remove UNIQUE constraint on section_id) try: - result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="library_sections"').fetchone() - if 'section_id INTEGER UNIQUE' in result[0]: + result = c_db.execute("SELECT SQL FROM sqlite_master WHERE type='table' AND name='library_sections'").fetchone() + if "section_id INTEGER UNIQUE" in result[0]: logger.debug("Altering database. 
Removing unique constraint on section_id from library_sections table.") c_db.execute( - 'CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, ' - 'thumb TEXT, custom_thumb_url TEXT, art TEXT, count INTEGER, parent_count INTEGER, child_count INTEGER, ' - 'do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, ' - 'deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))' + "CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, " + "thumb TEXT, custom_thumb_url TEXT, art TEXT, count INTEGER, parent_count INTEGER, child_count INTEGER, " + "do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, " + "deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))" ) c_db.execute( - 'INSERT INTO library_sections_temp (id, server_id, section_id, section_name, section_type, ' - 'thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, ' - 'keep_history, deleted_section) ' - 'SELECT id, server_id, section_id, section_name, section_type, ' - 'thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, ' - 'keep_history, deleted_section ' - 'FROM library_sections' + "INSERT INTO library_sections_temp (id, server_id, section_id, section_name, section_type, " + "thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, " + "keep_history, deleted_section) " + "SELECT id, server_id, section_id, section_name, section_type, " + "thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, " + "keep_history, deleted_section " + "FROM library_sections" ) c_db.execute( - 'DROP TABLE library_sections' + "DROP TABLE library_sections" ) c_db.execute( - 'ALTER TABLE library_sections_temp RENAME TO library_sections' + "ALTER TABLE library_sections_temp RENAME TO library_sections" ) except sqlite3.OperationalError: logger.warn("Unable to remove section_id unique constraint from library_sections.") try: c_db.execute( - 'DROP TABLE library_sections_temp' + "DROP TABLE library_sections_temp" ) except: pass # Upgrade library_sections table from earlier versions (remove duplicated libraries) try: - result = c_db.execute('SELECT * FROM library_sections WHERE server_id = ""').fetchall() + result = c_db.execute("SELECT * FROM library_sections WHERE server_id = ''").fetchall() if len(result) > 0: logger.debug("Altering database. Removing duplicate libraries from library_sections table.") c_db.execute( - 'DELETE FROM library_sections WHERE server_id = ""' + "DELETE FROM library_sections WHERE server_id = ''" ) except sqlite3.OperationalError: logger.warn("Unable to remove duplicate libraries from library_sections table.") # Upgrade library_sections table from earlier versions try: - c_db.execute('SELECT agent FROM library_sections') + c_db.execute("SELECT agent FROM library_sections") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table library_sections.") c_db.execute( - 'ALTER TABLE library_sections ADD COLUMN agent TEXT' + "ALTER TABLE library_sections ADD COLUMN agent TEXT" ) # Upgrade library_sections table from earlier versions try: - c_db.execute('SELECT custom_art_url FROM library_sections') + c_db.execute("SELECT custom_art_url FROM library_sections") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table library_sections.") c_db.execute( - 'ALTER TABLE library_sections ADD COLUMN custom_art_url TEXT' + "ALTER TABLE library_sections ADD COLUMN custom_art_url TEXT" ) # Upgrade library_sections table from earlier versions try: - c_db.execute('SELECT is_active FROM library_sections') + c_db.execute("SELECT is_active FROM library_sections") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table library_sections.") c_db.execute( - 'ALTER TABLE library_sections ADD COLUMN is_active INTEGER DEFAULT 1' + "ALTER TABLE library_sections ADD COLUMN is_active INTEGER DEFAULT 1" ) # Upgrade library_sections table from earlier versions try: - result = c_db.execute('SELECT thumb, art FROM library_sections WHERE section_id = ?', + result = c_db.execute("SELECT thumb, art FROM library_sections WHERE section_id = ?", [common.LIVE_TV_SECTION_ID]).fetchone() if result and (not result[0] or not result[1]): logger.debug("Altering database. Updating database table library_sections.") - c_db.execute('UPDATE library_sections SET thumb = ?, art =? WHERE section_id = ?', + c_db.execute("UPDATE library_sections SET thumb = ?, art =? WHERE section_id = ?", [common.DEFAULT_LIVE_TV_THUMB, common.DEFAULT_LIVE_TV_ART_FULL, common.LIVE_TV_SECTION_ID]) @@ -2266,49 +2269,49 @@ def dbcheck(): # Upgrade users table from earlier versions (remove UNIQUE constraint on username) try: - result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="users"').fetchone() - if 'username TEXT NOT NULL UNIQUE' in result[0]: + result = c_db.execute("SELECT SQL FROM sqlite_master WHERE type='table' AND name='users'").fetchone() + if "username TEXT NOT NULL UNIQUE" in result[0]: logger.debug("Altering database. 
Removing unique constraint on username from users table.") c_db.execute( - 'CREATE TABLE users_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, ' - 'thumb TEXT, custom_avatar_url TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, ' - 'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, ' - 'keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)' + "CREATE TABLE users_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, " + "thumb TEXT, custom_avatar_url TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, " + "is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, " + "keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)" ) c_db.execute( - 'INSERT INTO users_temp (id, user_id, username, friendly_name, thumb, custom_avatar_url, ' - 'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user) ' - 'SELECT id, user_id, username, friendly_name, thumb, custom_avatar_url, ' - 'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user ' - 'FROM users' + "INSERT INTO users_temp (id, user_id, username, friendly_name, thumb, custom_avatar_url, " + "email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user) " + "SELECT id, user_id, username, friendly_name, thumb, custom_avatar_url, " + "email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user " + "FROM users" ) c_db.execute( - 'DROP TABLE users' + "DROP TABLE users" ) c_db.execute( - 'ALTER TABLE users_temp RENAME TO users' + "ALTER TABLE users_temp RENAME TO users" ) except sqlite3.OperationalError: logger.warn("Unable to remove username unique constraint from users.") try: c_db.execute( - 'DROP TABLE users_temp' + "DROP TABLE users_temp" ) except: pass # Upgrade mobile_devices table from earlier versions try: - result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="mobile_devices"').fetchone() - if 'device_token TEXT NOT NULL UNIQUE' in result[0]: + result = c_db.execute("SELECT SQL FROM sqlite_master WHERE type='table' AND name='mobile_devices'").fetchone() + if "device_token TEXT NOT NULL UNIQUE" in result[0]: logger.debug("Altering database. Dropping and recreating mobile_devices table.") c_db.execute( - 'DROP TABLE mobile_devices' + "DROP TABLE mobile_devices" ) c_db.execute( - 'CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, friendly_name TEXT)' + "CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, friendly_name TEXT)" ) except sqlite3.OperationalError: logger.warn("Failed to recreate mobile_devices table.")
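The users_temp dance above exists because SQLite's ALTER TABLE cannot drop a constraint: to remove the UNIQUE on username the table has to be rebuilt, with the rows copied into a replacement table that is then renamed into place. A minimal sketch of the same rebuild on a toy schema (table and column names are illustrative only):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT NOT NULL UNIQUE)")
    conn.execute("INSERT INTO users (username) VALUES ('alice'), ('bob')")

    # Rebuild without the UNIQUE constraint: create, copy, drop, rename.
    conn.execute("CREATE TABLE users_temp (id INTEGER PRIMARY KEY, username TEXT NOT NULL)")
    conn.execute("INSERT INTO users_temp (id, username) SELECT id, username FROM users")
    conn.execute("DROP TABLE users")
    conn.execute("ALTER TABLE users_temp RENAME TO users")

    conn.execute("INSERT INTO users (username) VALUES ('alice')")  # duplicate now allowed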
Updating database table mobile_devices.") c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN last_seen INTEGER' + "ALTER TABLE mobile_devices ADD COLUMN last_seen INTEGER" ) # Upgrade mobile_devices table from earlier versions try: - c_db.execute('SELECT official FROM mobile_devices') + c_db.execute("SELECT official FROM mobile_devices") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table mobile_devices.") c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN official INTEGER DEFAULT 0' + "ALTER TABLE mobile_devices ADD COLUMN official INTEGER DEFAULT 0" ) # Update official mobile device flag - for device_id, in c_db.execute('SELECT device_id FROM mobile_devices').fetchall(): - c_db.execute('UPDATE mobile_devices SET official = ? WHERE device_id = ?', + for device_id, in c_db.execute("SELECT device_id FROM mobile_devices").fetchall(): + c_db.execute("UPDATE mobile_devices SET official = ? WHERE device_id = ?", [mobile_app.validate_onesignal_id(device_id), device_id]) # Upgrade mobile_devices table from earlier versions try: - c_db.execute('SELECT onesignal_id FROM mobile_devices') + c_db.execute("SELECT onesignal_id FROM mobile_devices") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table mobile_devices.") c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN onesignal_id TEXT' + "ALTER TABLE mobile_devices ADD COLUMN onesignal_id TEXT" ) # Upgrade mobile_devices table from earlier versions try: - c_db.execute('SELECT platform FROM mobile_devices') + c_db.execute("SELECT platform FROM mobile_devices") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table mobile_devices.") c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN platform TEXT' + "ALTER TABLE mobile_devices ADD COLUMN platform TEXT" ) c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN version TEXT' + "ALTER TABLE mobile_devices ADD COLUMN version TEXT" ) # Update mobile device platforms for device_id, in c_db.execute( - 'SELECT device_id FROM mobile_devices WHERE official > 0').fetchall(): - c_db.execute('UPDATE mobile_devices SET platform = ? WHERE device_id = ?', - ['android', device_id]) + "SELECT device_id FROM mobile_devices WHERE official > 0").fetchall(): + c_db.execute("UPDATE mobile_devices SET platform = ? WHERE device_id = ?", + ["android", device_id]) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT custom_conditions FROM notifiers') + c_db.execute("SELECT custom_conditions FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN custom_conditions TEXT' + "ALTER TABLE notifiers ADD COLUMN custom_conditions TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN custom_conditions_logic TEXT' + "ALTER TABLE notifiers ADD COLUMN custom_conditions_logic TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_change FROM notifiers') + c_db.execute("SELECT on_change FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_change INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_change INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_change_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_change_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_change_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_change_body TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_plexpydbcorrupt FROM notifiers') + c_db.execute("SELECT on_plexpydbcorrupt FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_body TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_error FROM notifiers') + c_db.execute("SELECT on_error FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_error INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_error INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_error_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_error_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_error_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_error_body TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_intro FROM notifiers') + c_db.execute("SELECT on_intro FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_intro INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_intro INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_intro_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_intro_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_intro_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_intro_body TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_credits INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_credits INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_credits_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_credits_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_credits_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_credits_body TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_commercial FROM notifiers') + c_db.execute("SELECT on_commercial FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_commercial INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_commercial INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_commercial_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_commercial_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_commercial_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_commercial_body TEXT" ) # Upgrade tvmaze_lookup table from earlier versions try: - c_db.execute('SELECT rating_key FROM tvmaze_lookup') + c_db.execute("SELECT rating_key FROM tvmaze_lookup") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table tvmaze_lookup.") c_db.execute( - 'ALTER TABLE tvmaze_lookup ADD COLUMN rating_key INTEGER' + "ALTER TABLE tvmaze_lookup ADD COLUMN rating_key INTEGER" ) c_db.execute( - 'DROP INDEX IF EXISTS idx_tvmaze_lookup_thetvdb_id' + "DROP INDEX IF EXISTS idx_tvmaze_lookup_thetvdb_id" ) c_db.execute( - 'DROP INDEX IF EXISTS idx_tvmaze_lookup_imdb_id' + "DROP INDEX IF EXISTS idx_tvmaze_lookup_imdb_id" ) # Upgrade themoviedb_lookup table from earlier versions try: - c_db.execute('SELECT rating_key FROM themoviedb_lookup') + c_db.execute("SELECT rating_key FROM themoviedb_lookup") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table themoviedb_lookup.") c_db.execute( - 'ALTER TABLE themoviedb_lookup ADD COLUMN rating_key INTEGER' + "ALTER TABLE themoviedb_lookup ADD COLUMN rating_key INTEGER" ) c_db.execute( - 'DROP INDEX IF EXISTS idx_themoviedb_lookup_thetvdb_id' + "DROP INDEX IF EXISTS idx_themoviedb_lookup_thetvdb_id" ) c_db.execute( - 'DROP INDEX IF EXISTS idx_themoviedb_lookup_imdb_id' + "DROP INDEX IF EXISTS idx_themoviedb_lookup_imdb_id" ) # Upgrade user_login table from earlier versions try: - c_db.execute('SELECT success FROM user_login') + c_db.execute("SELECT success FROM user_login") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table user_login.") c_db.execute( - 'ALTER TABLE user_login ADD COLUMN success INTEGER DEFAULT 1' + "ALTER TABLE user_login ADD COLUMN success INTEGER DEFAULT 1" ) # Upgrade user_login table from earlier versions try: - c_db.execute('SELECT expiry FROM user_login') + c_db.execute("SELECT expiry FROM user_login") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table user_login.") c_db.execute( - 'ALTER TABLE user_login ADD COLUMN expiry TEXT' + "ALTER TABLE user_login ADD COLUMN expiry TEXT" ) c_db.execute( - 'ALTER TABLE user_login ADD COLUMN jwt_token TEXT' + "ALTER TABLE user_login ADD COLUMN jwt_token TEXT" ) # Rename notifiers in the database - result = c_db.execute('SELECT agent_label FROM notifiers ' - 'WHERE agent_label = "XBMC" ' - 'OR agent_label = "OSX Notify" ' - 'OR agent_name = "androidapp"').fetchone() + result = c_db.execute("SELECT agent_label FROM notifiers " + "WHERE agent_label = 'XBMC' " + "OR agent_label = 'OSX Notify' " + "OR agent_name = 'androidapp'").fetchone() if result: logger.debug("Altering database. 
Renaming notifiers.") c_db.execute( - 'UPDATE notifiers SET agent_label = "Kodi" WHERE agent_label = "XBMC"' + "UPDATE notifiers SET agent_label = 'Kodi' WHERE agent_label = 'XBMC'" ) c_db.execute( - 'UPDATE notifiers SET agent_label = "macOS Notification Center" WHERE agent_label = "OSX Notify"' + "UPDATE notifiers SET agent_label = 'macOS Notification Center' WHERE agent_label = 'OSX Notify'" ) c_db.execute( - 'UPDATE notifiers SET agent_name = "remoteapp", agent_label = "Tautulli Remote App" ' - 'WHERE agent_name = "androidapp"' + "UPDATE notifiers SET agent_name = 'remoteapp', agent_label = 'Tautulli Remote App' " + "WHERE agent_name = 'androidapp'" ) # Upgrade exports table from earlier versions try: - c_db.execute('SELECT thumb_level FROM exports') + c_db.execute("SELECT thumb_level FROM exports") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table exports.") c_db.execute( - 'ALTER TABLE exports ADD COLUMN thumb_level INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN thumb_level INTEGER DEFAULT 0" ) c_db.execute( - 'UPDATE exports SET thumb_level = 9 WHERE include_thumb = 1' + "UPDATE exports SET thumb_level = 9 WHERE include_thumb = 1" ) c_db.execute( - 'ALTER TABLE exports ADD COLUMN art_level INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN art_level INTEGER DEFAULT 0" ) c_db.execute( - 'UPDATE exports SET art_level = 9 WHERE include_art = 1' + "UPDATE exports SET art_level = 9 WHERE include_art = 1" ) # Upgrade exports table from earlier versions try: - c_db.execute('SELECT title FROM exports') + c_db.execute("SELECT title FROM exports") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table exports.") c_db.execute( - 'ALTER TABLE exports ADD COLUMN title TEXT' + "ALTER TABLE exports ADD COLUMN title TEXT" ) c_db.execute( - 'ALTER TABLE exports ADD COLUMN individual_files INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN individual_files INTEGER DEFAULT 0" ) # Upgrade exports table from earlier versions try: - c_db.execute('SELECT total_items FROM exports') + c_db.execute("SELECT total_items FROM exports") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table exports.") c_db.execute( - 'ALTER TABLE exports ADD COLUMN exported_items INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN exported_items INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE exports ADD COLUMN total_items INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN total_items INTEGER DEFAULT 0" ) # Fix unique constraints try: - c_db.execute('DELETE FROM tvmaze_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM tvmaze_lookup GROUP BY rating_key)') + c_db.execute("DELETE FROM tvmaze_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM tvmaze_lookup GROUP BY rating_key)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM themoviedb_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM themoviedb_lookup GROUP BY rating_key)') + c_db.execute("DELETE FROM themoviedb_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM themoviedb_lookup GROUP BY rating_key)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM musicbrainz_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM musicbrainz_lookup GROUP BY rating_key)') + c_db.execute("DELETE FROM musicbrainz_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM musicbrainz_lookup GROUP BY rating_key)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM image_hash_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM image_hash_lookup GROUP BY img_hash)') + c_db.execute("DELETE FROM image_hash_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM image_hash_lookup GROUP BY img_hash)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM cloudinary_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM cloudinary_lookup GROUP BY img_hash)') + c_db.execute("DELETE FROM cloudinary_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM cloudinary_lookup GROUP BY img_hash)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM imgur_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM imgur_lookup GROUP BY img_hash)') + c_db.execute("DELETE FROM imgur_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM imgur_lookup GROUP BY img_hash)") except sqlite3.OperationalError: pass # Add "Local" user to database as default unauthenticated user. - result = c_db.execute('SELECT id FROM users WHERE username = "Local"') + result = c_db.execute("SELECT id FROM users WHERE username = 'Local'") if not result.fetchone(): logger.debug("User 'Local' does not exist. 
Adding user.") - c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")') + c_db.execute("INSERT INTO users (user_id, username) VALUES (0, 'Local')") # Create session_history table indices c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_media_type" ' - 'ON "session_history" ("media_type")' + "CREATE INDEX IF NOT EXISTS idx_session_history_media_type " + "ON session_history (media_type)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_media_type_stopped" ' - 'ON "session_history" ("media_type", "stopped" ASC)' + "CREATE INDEX IF NOT EXISTS idx_session_history_media_type_stopped " + "ON session_history (media_type, stopped ASC)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_rating_key" ' - 'ON "session_history" ("rating_key")' + "CREATE INDEX IF NOT EXISTS idx_session_history_rating_key " + "ON session_history (rating_key)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_parent_rating_key" ' - 'ON "session_history" ("parent_rating_key")' + "CREATE INDEX IF NOT EXISTS idx_session_history_parent_rating_key " + "ON session_history (parent_rating_key)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_grandparent_rating_key" ' - 'ON "session_history" ("grandparent_rating_key")' + "CREATE INDEX IF NOT EXISTS idx_session_history_grandparent_rating_key " + "ON session_history (grandparent_rating_key)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_user" ' - 'ON "session_history" ("user")' + "CREATE INDEX IF NOT EXISTS idx_session_history_user " + "ON session_history (user)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_user_id" ' - 'ON "session_history" ("user_id")' + "CREATE INDEX IF NOT EXISTS idx_session_history_user_id " + "ON session_history (user_id)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_user_id_stopped" ' - 'ON "session_history" ("user_id", "stopped" ASC)' + "CREATE INDEX IF NOT EXISTS idx_session_history_user_id_stopped " + "ON session_history (user_id, stopped ASC)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_section_id" ' - 'ON "session_history" ("section_id")' + "CREATE INDEX IF NOT EXISTS idx_session_history_section_id " + "ON session_history (section_id)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_section_id_stopped" ' - 'ON "session_history" ("section_id", "stopped" ASC)' + "CREATE INDEX IF NOT EXISTS idx_session_history_section_id_stopped " + "ON session_history (section_id, stopped ASC)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_reference_id" ' - 'ON "session_history" ("reference_id" ASC)' + "CREATE INDEX IF NOT EXISTS idx_session_history_reference_id " + "ON session_history (reference_id ASC)" ) # Create session_history_metadata table indices c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_metadata_rating_key" ' - 'ON "session_history_metadata" ("rating_key")' + "CREATE INDEX IF NOT EXISTS idx_session_history_metadata_rating_key " + "ON session_history_metadata (rating_key)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_metadata_guid" ' - 'ON "session_history_metadata" ("guid")' + "CREATE INDEX IF NOT EXISTS idx_session_history_metadata_guid " + "ON session_history_metadata (guid)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_metadata_live" ' - 'ON "session_history_metadata" ("live")' + "CREATE INDEX IF NOT EXISTS idx_session_history_metadata_live " + "ON 
session_history_metadata (live)" ) # Create session_history_media_info table indices c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_media_info_transcode_decision" ' - 'ON "session_history_media_info" ("transcode_decision")' + "CREATE INDEX IF NOT EXISTS idx_session_history_media_info_transcode_decision " + "ON session_history_media_info (transcode_decision)" ) # Create lookup table indices c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_tvmaze_lookup" ' - 'ON "tvmaze_lookup" ("rating_key")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_tvmaze_lookup " + "ON tvmaze_lookup (rating_key)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_themoviedb_lookup" ' - 'ON "themoviedb_lookup" ("rating_key")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_themoviedb_lookup " + "ON themoviedb_lookup (rating_key)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_musicbrainz_lookup" ' - 'ON "musicbrainz_lookup" ("rating_key")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_musicbrainz_lookup " + "ON musicbrainz_lookup (rating_key)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_image_hash_lookup" ' - 'ON "image_hash_lookup" ("img_hash")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_image_hash_lookup " + "ON image_hash_lookup (img_hash)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_cloudinary_lookup" ' - 'ON "cloudinary_lookup" ("img_hash")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_cloudinary_lookup " + "ON cloudinary_lookup (img_hash)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_imgur_lookup" ' - 'ON "imgur_lookup" ("img_hash")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_imgur_lookup " + "ON imgur_lookup (img_hash)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_sessions_continued" ' - 'ON "sessions_continued" ("user_id", "machine_id", "media_type")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_sessions_continued " + "ON sessions_continued (user_id, machine_id, media_type)" ) # Set database version - result = c_db.execute('SELECT value FROM version_info WHERE key = "version"').fetchone() + result = c_db.execute("SELECT value FROM version_info WHERE key = 'version'").fetchone() if not result: c_db.execute( - 'INSERT OR REPLACE INTO version_info (key, value) VALUES ("version", ?)', + "INSERT OR REPLACE INTO version_info (key, value) VALUES ('version', ?)", [common.RELEASE] ) elif helpers.version_to_tuple(result[0]) < helpers.version_to_tuple(common.RELEASE): c_db.execute( - 'UPDATE version_info SET value = ? WHERE key = "version"', + "UPDATE version_info SET value = ? WHERE key = 'version'", [common.RELEASE] ) @@ -2727,9 +2730,9 @@ def dbcheck(): # Migrate poster_urls to imgur_lookup table try: db = database.MonitorDatabase() - result = db.select('SELECT SQL FROM sqlite_master WHERE type="table" AND name="poster_urls"') + result = db.select("SELECT SQL FROM sqlite_master WHERE type='table' AND name='poster_urls'") if result: - result = db.select('SELECT * FROM poster_urls') + result = db.select("SELECT * FROM poster_urls") logger.debug("Altering database. Updating database table imgur_lookup.") data_factory = datafactory.DataFactory() diff --git a/plexpy/activity_pinger.py b/plexpy/activity_pinger.py index f1aac594..f349268c 100644 --- a/plexpy/activity_pinger.py +++ b/plexpy/activity_pinger.py @@ -99,22 +99,22 @@ def check_active_sessions(ws_request=False): # Using the set config parameter as the interval, probably not the most accurate but # it will have to do for now. If it's a websocket request don't use this method. 
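The comment above is the key assumption in this hunk: while a stream sits paused, every poll of check_active_sessions() adds the fixed monitoring interval to paused_counter, so the running total is only accurate to within one polling interval (websocket-driven updates bypass this path entirely). A toy illustration of that accumulation, with stand-in values for the config setting and the session row:

    MONITORING_INTERVAL = 60   # stand-in for plexpy.CONFIG.MONITORING_INTERVAL, in seconds

    paused_counter = 0
    polls_while_paused = 5     # hypothetical: stream observed paused on five consecutive polls
    for _ in range(polls_while_paused):
        paused_counter += MONITORING_INTERVAL

    print(paused_counter)      # 300; the true pause time can differ by up to one interval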
paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL - monitor_db.action('UPDATE sessions SET paused_counter = ? ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions SET paused_counter = ? " + "WHERE session_key = ? AND rating_key = ?", [paused_counter, stream['session_key'], stream['rating_key']]) if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0: # The stream is buffering so we need to increment the buffer_count # We're just going to increment on every monitor ping, # as it would be difficult to keep track otherwise - monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions SET buffer_count = buffer_count + 1 " + "WHERE session_key = ? AND rating_key = ?", [stream['session_key'], stream['rating_key']]) # Check the current buffer count and last buffer to determine if we should notify - buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered ' - 'FROM sessions ' - 'WHERE session_key = ? AND rating_key = ?', + buffer_values = monitor_db.select("SELECT buffer_count, buffer_last_triggered " + "FROM sessions " + "WHERE session_key = ? AND rating_key = ?", [stream['session_key'], stream['rating_key']]) if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD: @@ -125,9 +125,9 @@ def check_active_sessions(ws_request=False): logger.info("Tautulli Monitor :: User '%s' has triggered a buffer warning." % stream['user']) # Set the buffer trigger time - monitor_db.action('UPDATE sessions ' - 'SET buffer_last_triggered = strftime("%s","now") ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions " + "SET buffer_last_triggered = strftime('%s', 'now') " + "WHERE session_key = ? AND rating_key = ?", [stream['session_key'], stream['rating_key']]) plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'}) @@ -139,9 +139,9 @@ def check_active_sessions(ws_request=False): logger.info("Tautulli Monitor :: User '%s' has triggered multiple buffer warnings." % stream['user']) # Set the buffer trigger time - monitor_db.action('UPDATE sessions ' - 'SET buffer_last_triggered = strftime("%s","now") ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions " + "SET buffer_last_triggered = strftime('%s', 'now') " + "WHERE session_key = ? AND rating_key = ?", [stream['session_key'], stream['rating_key']]) plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'}) @@ -171,8 +171,8 @@ def check_active_sessions(ws_request=False): if not stream['stopped']: # Set the stream stop time stream['stopped'] = helpers.timestamp() - monitor_db.action('UPDATE sessions SET stopped = ?, state = ? ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions SET stopped = ?, state = ? " + "WHERE session_key = ? 
AND rating_key = ?", [stream['stopped'], 'stopped', stream['session_key'], stream['rating_key']]) progress_percent = helpers.get_percent(stream['view_offset'], stream['duration']) diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 71b6e3e0..588e91ce 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -331,10 +331,10 @@ class ActivityProcessor(object): if session['live']: # Check if we should group the session, select the last guid from the user - query = 'SELECT session_history.id, session_history_metadata.guid, session_history.reference_id ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history.id == session_history_metadata.id ' \ - 'WHERE session_history.user_id = ? ORDER BY session_history.id DESC LIMIT 1 ' + query = "SELECT session_history.id, session_history_metadata.guid, session_history.reference_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history.id == session_history_metadata.id " \ + "WHERE session_history.user_id = ? ORDER BY session_history.id DESC LIMIT 1 " args = [session['user_id']] @@ -351,8 +351,8 @@ class ActivityProcessor(object): else: # Check if we should group the session, select the last two rows from the user - query = 'SELECT id, rating_key, view_offset, reference_id FROM session_history ' \ - 'WHERE user_id = ? AND rating_key = ? ORDER BY id DESC LIMIT 2 ' + query = "SELECT id, rating_key, view_offset, reference_id FROM session_history " \ + "WHERE user_id = ? AND rating_key = ? ORDER BY id DESC LIMIT 2 " args = [session['user_id'], session['rating_key']] @@ -375,7 +375,7 @@ class ActivityProcessor(object): marker_first, marker_final ) - query = 'UPDATE session_history SET reference_id = ? WHERE id = ? ' + query = "UPDATE session_history SET reference_id = ? WHERE id = ? " # If previous session view offset less than watched percent, # and new session view offset is greater, @@ -547,12 +547,12 @@ class ActivityProcessor(object): return session['id'] def get_sessions(self, user_id=None, ip_address=None): - query = 'SELECT * FROM sessions' + query = "SELECT * FROM sessions" args = [] if str(user_id).isdigit(): - ip = ' GROUP BY ip_address' if ip_address else '' - query += ' WHERE user_id = ?' + ip + ip = " GROUP BY ip_address" if ip_address else "" + query += " WHERE user_id = ?" + ip args.append(user_id) sessions = self.db.select(query, args) @@ -560,8 +560,8 @@ class ActivityProcessor(object): def get_session_by_key(self, session_key=None): if str(session_key).isdigit(): - session = self.db.select_single('SELECT * FROM sessions ' - 'WHERE session_key = ? ', + session = self.db.select_single("SELECT * FROM sessions " + "WHERE session_key = ? ", args=[session_key]) if session: return session @@ -570,8 +570,8 @@ class ActivityProcessor(object): def get_session_by_id(self, session_id=None): if session_id: - session = self.db.select_single('SELECT * FROM sessions ' - 'WHERE session_id = ? ', + session = self.db.select_single("SELECT * FROM sessions " + "WHERE session_id = ? 
", args=[session_id]) if session: return session @@ -597,15 +597,15 @@ class ActivityProcessor(object): def delete_session(self, session_key=None, row_id=None): if str(session_key).isdigit(): - self.db.action('DELETE FROM sessions WHERE session_key = ?', [session_key]) + self.db.action("DELETE FROM sessions WHERE session_key = ?", [session_key]) elif str(row_id).isdigit(): - self.db.action('DELETE FROM sessions WHERE id = ?', [row_id]) + self.db.action("DELETE FROM sessions WHERE id = ?", [row_id]) def set_session_last_paused(self, session_key=None, timestamp=None): if str(session_key).isdigit(): - result = self.db.select('SELECT last_paused, paused_counter ' - 'FROM sessions ' - 'WHERE session_key = ?', args=[session_key]) + result = self.db.select("SELECT last_paused, paused_counter " + "FROM sessions " + "WHERE session_key = ?", args=[session_key]) paused_counter = None for session in result: @@ -626,15 +626,15 @@ class ActivityProcessor(object): def increment_session_buffer_count(self, session_key=None): if str(session_key).isdigit(): - self.db.action('UPDATE sessions SET buffer_count = buffer_count + 1 ' - 'WHERE session_key = ?', + self.db.action("UPDATE sessions SET buffer_count = buffer_count + 1 " + "WHERE session_key = ?", [session_key]) def get_session_buffer_count(self, session_key=None): if str(session_key).isdigit(): - buffer_count = self.db.select_single('SELECT buffer_count ' - 'FROM sessions ' - 'WHERE session_key = ?', + buffer_count = self.db.select_single("SELECT buffer_count " + "FROM sessions " + "WHERE session_key = ?", [session_key]) if buffer_count: return buffer_count['buffer_count'] @@ -643,15 +643,15 @@ class ActivityProcessor(object): def set_session_buffer_trigger_time(self, session_key=None): if str(session_key).isdigit(): - self.db.action('UPDATE sessions SET buffer_last_triggered = strftime("%s","now") ' - 'WHERE session_key = ?', + self.db.action("UPDATE sessions SET buffer_last_triggered = strftime('%s', 'now') " + "WHERE session_key = ?", [session_key]) def get_session_buffer_trigger_time(self, session_key=None): if str(session_key).isdigit(): - last_time = self.db.select_single('SELECT buffer_last_triggered ' - 'FROM sessions ' - 'WHERE session_key = ?', + last_time = self.db.select_single("SELECT buffer_last_triggered " + "FROM sessions " + "WHERE session_key = ?", [session_key]) if last_time: return last_time['buffer_last_triggered'] @@ -660,12 +660,12 @@ class ActivityProcessor(object): def set_temp_stopped(self): stopped_time = helpers.timestamp() - self.db.action('UPDATE sessions SET stopped = ?', [stopped_time]) + self.db.action("UPDATE sessions SET stopped = ?", [stopped_time]) def increment_write_attempts(self, session_key=None): if str(session_key).isdigit(): session = self.get_session_by_key(session_key=session_key) - self.db.action('UPDATE sessions SET write_attempts = ? WHERE session_key = ?', + self.db.action("UPDATE sessions SET write_attempts = ? WHERE session_key = ?", [session['write_attempts'] + 1, session_key]) def set_marker(self, session_key=None, marker_idx=None, marker_type=None): @@ -674,13 +674,13 @@ class ActivityProcessor(object): int(marker_type == 'commercial'), int(marker_type == 'credits') ] - self.db.action('UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? ' - 'WHERE session_key = ?', + self.db.action("UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? 
" + "WHERE session_key = ?", marker_args + [marker_idx, session_key]) def set_watched(self, session_key=None): - self.db.action('UPDATE sessions SET watched = ? ' - 'WHERE session_key = ?', + self.db.action("UPDATE sessions SET watched = ? " + "WHERE session_key = ?", [1, session_key]) def write_continued_session(self, user_id=None, machine_id=None, media_type=None, stopped=None): @@ -689,9 +689,9 @@ class ActivityProcessor(object): self.db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values) def is_initial_stream(self, user_id=None, machine_id=None, media_type=None, started=None): - last_session = self.db.select_single('SELECT stopped ' - 'FROM sessions_continued ' - 'WHERE user_id = ? AND machine_id = ? AND media_type = ? ' - 'ORDER BY stopped DESC', + last_session = self.db.select_single("SELECT stopped " + "FROM sessions_continued " + "WHERE user_id = ? AND machine_id = ? AND media_type = ? " + "ORDER BY stopped DESC", [user_id, machine_id, media_type]) return int(started - last_session.get('stopped', 0) >= plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD) diff --git a/plexpy/database.py b/plexpy/database.py index 72ce7782..859d9274 100644 --- a/plexpy/database.py +++ b/plexpy/database.py @@ -54,7 +54,7 @@ def validate_database(database=None): return 'Uncaught exception' try: - connection.execute('SELECT started from session_history') + connection.execute("SELECT started from session_history") connection.close() except (sqlite3.OperationalError, sqlite3.DatabaseError, ValueError) as e: logger.error("Tautulli Database :: Invalid database specified: %s", e) @@ -92,11 +92,11 @@ def import_tautulli_db(database=None, method=None, backup=False): set_is_importing(True) db = MonitorDatabase() - db.connection.execute('BEGIN IMMEDIATE') - db.connection.execute('ATTACH ? AS import_db', [database]) + db.connection.execute("BEGIN IMMEDIATE") + db.connection.execute("ATTACH ? 
AS import_db", [database]) try: - version_info = db.select_single('SELECT * FROM import_db.version_info WHERE key = "version"') + version_info = db.select_single("SELECT * FROM import_db.version_info WHERE key = 'version'") import_db_version = version_info['value'] except (sqlite3.OperationalError, KeyError): import_db_version = 'v2.6.10' @@ -105,7 +105,7 @@ def import_tautulli_db(database=None, method=None, backup=False): import_db_version = helpers.version_to_tuple(import_db_version) # Get the current number of used ids in the session_history table - session_history_seq = db.select_single('SELECT seq FROM sqlite_sequence WHERE name = "session_history"') + session_history_seq = db.select_single("SELECT seq FROM sqlite_sequence WHERE name = 'session_history'") session_history_rows = session_history_seq.get('seq', 0) session_history_tables = ('session_history', 'session_history_metadata', 'session_history_media_info') @@ -113,11 +113,11 @@ def import_tautulli_db(database=None, method=None, backup=False): if method == 'merge': logger.info("Tautulli Database :: Creating temporary database tables to re-index grouped session history.") for table_name in session_history_tables: - db.action('CREATE TABLE {table}_copy AS SELECT * FROM import_db.{table}'.format(table=table_name)) - db.action('UPDATE {table}_copy SET id = id + ?'.format(table=table_name), + db.action("CREATE TABLE {table}_copy AS SELECT * FROM import_db.{table}".format(table=table_name)) + db.action("UPDATE {table}_copy SET id = id + ?".format(table=table_name), [session_history_rows]) if table_name == 'session_history': - db.action('UPDATE {table}_copy SET reference_id = reference_id + ?'.format(table=table_name), + db.action("UPDATE {table}_copy SET reference_id = reference_id + ?".format(table=table_name), [session_history_rows]) # Migrate section_id from session_history_metadata to session_history @@ -128,28 +128,28 @@ def import_tautulli_db(database=None, method=None, backup=False): else: from_db_name = 'import_db' copy = '' - db.action('ALTER TABLE {from_db}.session_history{copy} ' - 'ADD COLUMN section_id INTEGER'.format(from_db=from_db_name, + db.action("ALTER TABLE {from_db}.session_history{copy} " + "ADD COLUMN section_id INTEGER".format(from_db=from_db_name, copy=copy)) - db.action('UPDATE {from_db}.session_history{copy} SET section_id = (' - 'SELECT section_id FROM {from_db}.session_history_metadata{copy} ' - 'WHERE {from_db}.session_history_metadata{copy}.id = ' - '{from_db}.session_history{copy}.id)'.format(from_db=from_db_name, + db.action("UPDATE {from_db}.session_history{copy} SET section_id = (" + "SELECT section_id FROM {from_db}.session_history_metadata{copy} " + "WHERE {from_db}.session_history_metadata{copy}.id = " + "{from_db}.session_history{copy}.id)".format(from_db=from_db_name, copy=copy)) # Keep track of all table columns so that duplicates can be removed after importing table_columns = {} - tables = db.select('SELECT name FROM import_db.sqlite_master ' - 'WHERE type = "table" AND name NOT LIKE "sqlite_%"' - 'ORDER BY name') + tables = db.select("SELECT name FROM import_db.sqlite_master " + "WHERE type = 'table' AND name NOT LIKE 'sqlite_%'" + "ORDER BY name") for table in tables: table_name = table['name'] if table_name == 'sessions' or table_name == 'version_info': # Skip temporary sessions table continue - current_table = db.select('PRAGMA main.table_info({table})'.format(table=table_name)) + current_table = db.select("PRAGMA main.table_info({table})".format(table=table_name)) if not current_table: # 
Skip table that does not exist
             continue
 
@@ -158,8 +158,8 @@ def import_tautulli_db(database=None, method=None, backup=False):
 
         if method == 'overwrite':
             # Clear the table and reset the autoincrement ids
-            db.action('DELETE FROM {table}'.format(table=table_name))
-            db.action('DELETE FROM sqlite_sequence WHERE name = ?', [table_name])
+            db.action("DELETE FROM {table}".format(table=table_name))
+            db.action("DELETE FROM sqlite_sequence WHERE name = ?", [table_name])
 
         if method == 'merge' and table_name in session_history_tables:
             from_db_name = 'main'
@@ -170,7 +170,7 @@ def import_tautulli_db(database=None, method=None, backup=False):
 
         # Get the list of columns to import
         current_columns = [c['name'] for c in current_table]
-        import_table = db.select('PRAGMA {from_db}.table_info({from_table})'.format(from_db=from_db_name,
+        import_table = db.select("PRAGMA {from_db}.table_info({from_table})".format(from_db=from_db_name,
                                                                                     from_table=from_table_name))
 
         if method == 'merge' and table_name not in session_history_tables:
@@ -182,29 +182,29 @@ def import_tautulli_db(database=None, method=None, backup=False):
         insert_columns = ', '.join(import_columns)
 
         # Insert the data with ignore instead of replace to be safe
-        db.action('INSERT OR IGNORE INTO {table} ({columns}) '
-                  'SELECT {columns} FROM {from_db}.{from_table}'.format(table=table_name,
+        db.action("INSERT OR IGNORE INTO {table} ({columns}) "
+                  "SELECT {columns} FROM {from_db}.{from_table}".format(table=table_name,
                                                                         columns=insert_columns,
                                                                         from_db=from_db_name,
                                                                         from_table=from_table_name))
 
-    db.connection.execute('DETACH import_db')
+    db.connection.execute("DETACH import_db")
 
     if method == 'merge':
         for table_name, columns in sorted(table_columns.items()):
             duplicate_columns = ', '.join([c for c in columns if c not in ('id', 'reference_id')])
             logger.info("Tautulli Database :: Removing duplicate rows from database table '%s'.", table_name)
             if table_name in session_history_tables[1:]:
-                db.action('DELETE FROM {table} WHERE id NOT IN '
-                          '(SELECT id FROM session_history)'.format(table=table_name))
+                db.action("DELETE FROM {table} WHERE id NOT IN "
+                          "(SELECT id FROM session_history)".format(table=table_name))
             else:
-                db.action('DELETE FROM {table} WHERE id NOT IN '
-                          '(SELECT MIN(id) FROM {table} GROUP BY {columns})'.format(table=table_name,
+                db.action("DELETE FROM {table} WHERE id NOT IN "
+                          "(SELECT MIN(id) FROM {table} GROUP BY {columns})".format(table=table_name,
                                                                                     columns=duplicate_columns))
 
         logger.info("Tautulli Database :: Deleting temporary database tables.")
         for table_name in session_history_tables:
-            db.action('DROP TABLE {table}_copy'.format(table=table_name))
+            db.action("DROP TABLE {table}_copy".format(table=table_name))
 
     vacuum()
 
@@ -217,7 +217,7 @@ def integrity_check():
     monitor_db = MonitorDatabase()
 
-    result = monitor_db.select_single('PRAGMA integrity_check')
+    result = monitor_db.select_single("PRAGMA integrity_check")
     return result
 
 
@@ -227,7 +227,7 @@ def clear_table(table=None):
     logger.debug("Tautulli Database :: Clearing database table '%s'."
% table) try: - monitor_db.action('DELETE FROM %s' % table) + monitor_db.action("DELETE FROM %s" % table) vacuum() return True except Exception as e: @@ -286,7 +286,7 @@ def delete_user_history(user_id=None): monitor_db = MonitorDatabase() # Get all history associated with the user_id - result = monitor_db.select('SELECT id FROM session_history WHERE user_id = ?', + result = monitor_db.select("SELECT id FROM session_history WHERE user_id = ?", [user_id]) row_ids = [row['id'] for row in result] @@ -299,7 +299,7 @@ def delete_library_history(section_id=None): monitor_db = MonitorDatabase() # Get all history associated with the section_id - result = monitor_db.select('SELECT id FROM session_history WHERE section_id = ?', + result = monitor_db.select("SELECT id FROM session_history WHERE section_id = ?", [section_id]) row_ids = [row['id'] for row in result] @@ -312,7 +312,7 @@ def vacuum(): logger.info("Tautulli Database :: Vacuuming database.") try: - monitor_db.action('VACUUM') + monitor_db.action("VACUUM") except Exception as e: logger.error("Tautulli Database :: Failed to vacuum database: %s" % e) @@ -322,7 +322,7 @@ def optimize(): logger.info("Tautulli Database :: Optimizing database.") try: - monitor_db.action('PRAGMA optimize') + monitor_db.action("PRAGMA optimize") except Exception as e: logger.error("Tautulli Database :: Failed to optimize database: %s" % e) @@ -362,7 +362,7 @@ def make_backup(cleanup=False, scheduler=False): os.makedirs(backup_folder) db = MonitorDatabase() - db.connection.execute('BEGIN IMMEDIATE') + db.connection.execute("BEGIN IMMEDIATE") shutil.copyfile(db_filename(), backup_file_fp) db.connection.rollback() @@ -496,6 +496,6 @@ class MonitorDatabase(object): def last_insert_id(self): # Get the last insert row id - result = self.select_single(query='SELECT last_insert_rowid() AS last_id') + result = self.select_single(query="SELECT last_insert_rowid() AS last_id") if result: return result.get('last_id', None) \ No newline at end of file diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index e2ee0c2b..3db32a51 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -93,61 +93,61 @@ class DataFactory(object): group_by = ['session_history.reference_id'] if grouping else ['session_history.id'] columns = [ - 'session_history.reference_id', - 'session_history.id AS row_id', - 'MAX(started) AS date', - 'MIN(started) AS started', - 'MAX(stopped) AS stopped', - 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - \ - SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration', - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter', - 'session_history.view_offset', - 'session_history.user_id', - 'session_history.user', - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name', - 'users.thumb AS user_thumb', - 'users.custom_avatar_url AS custom_thumb', - 'platform', - 'product', - 'player', - 'ip_address', - 'machine_id', - 'location', - 'secure', - 'relayed', - 'session_history.media_type', - '(CASE WHEN session_history_metadata.live = 1 THEN \'live\' ELSE session_history.media_type END) \ - AS media_type_live', - 'session_history_metadata.rating_key', - 'session_history_metadata.parent_rating_key', - 'session_history_metadata.grandparent_rating_key', - 'session_history_metadata.full_title', - 'session_history_metadata.title', - 'session_history_metadata.parent_title', - 
'session_history_metadata.grandparent_title', - 'session_history_metadata.original_title', - 'session_history_metadata.year', - 'session_history_metadata.media_index', - 'session_history_metadata.parent_media_index', - 'session_history_metadata.thumb', - 'session_history_metadata.parent_thumb', - 'session_history_metadata.grandparent_thumb', - 'session_history_metadata.live', - 'session_history_metadata.added_at', - 'session_history_metadata.originally_available_at', - 'session_history_metadata.guid', - 'MAX((CASE WHEN (view_offset IS NULL OR view_offset = "") THEN 0.1 ELSE view_offset * 1.0 END) / \ - (CASE WHEN (session_history_metadata.duration IS NULL OR session_history_metadata.duration = "") \ - THEN 1.0 ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete', - 'session_history_metadata.duration', - 'session_history_metadata.marker_credits_first', - 'session_history_metadata.marker_credits_final', - 'session_history_media_info.transcode_decision', - 'COUNT(*) AS group_count', - 'GROUP_CONCAT(session_history.id) AS group_ids', - 'NULL AS state', - 'NULL AS session_key' + "session_history.reference_id", + "session_history.id AS row_id", + "MAX(started) AS date", + "MIN(started) AS started", + "MAX(stopped) AS stopped", + "SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - \ + SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration", + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter", + "session_history.view_offset", + "session_history.user_id", + "session_history.user", + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name", + "users.thumb AS user_thumb", + "users.custom_avatar_url AS custom_thumb", + "platform", + "product", + "player", + "ip_address", + "machine_id", + "location", + "secure", + "relayed", + "session_history.media_type", + "(CASE WHEN session_history_metadata.live = 1 THEN 'live' ELSE session_history.media_type END) \ + AS media_type_live", + "session_history_metadata.rating_key", + "session_history_metadata.parent_rating_key", + "session_history_metadata.grandparent_rating_key", + "session_history_metadata.full_title", + "session_history_metadata.title", + "session_history_metadata.parent_title", + "session_history_metadata.grandparent_title", + "session_history_metadata.original_title", + "session_history_metadata.year", + "session_history_metadata.media_index", + "session_history_metadata.parent_media_index", + "session_history_metadata.thumb", + "session_history_metadata.parent_thumb", + "session_history_metadata.grandparent_thumb", + "session_history_metadata.live", + "session_history_metadata.added_at", + "session_history_metadata.originally_available_at", + "session_history_metadata.guid", + "MAX((CASE WHEN (view_offset IS NULL OR view_offset = '') THEN 0.1 ELSE view_offset * 1.0 END) / \ + (CASE WHEN (session_history_metadata.duration IS NULL OR session_history_metadata.duration = '') \ + THEN 1.0 ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete", + "session_history_metadata.duration", + "session_history_metadata.marker_credits_first", + "session_history_metadata.marker_credits_final", + "session_history_media_info.transcode_decision", + "COUNT(*) AS group_count", + "GROUP_CONCAT(session_history.id) AS group_ids", + "NULL AS state", + "NULL AS session_key" ] if include_activity: @@ -157,60 +157,60 @@ class DataFactory(object): 
group_by_union = ['session_key'] columns_union = [ - 'NULL AS reference_id', - 'NULL AS row_id', - 'started AS date', - 'started', - 'stopped', - 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE (strftime("%s", "now") - started) END) - \ - SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration', - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter', - 'view_offset', - 'user_id', - 'user', - '(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = "" \ - THEN user ELSE friendly_name END) AS friendly_name', - 'NULL AS user_thumb', - 'NULL AS custom_thumb', - 'platform', - 'product', - 'player', - 'ip_address', - 'machine_id', - 'location', - 'secure', - 'relayed', - 'media_type', - '(CASE WHEN live = 1 THEN \'live\' ELSE media_type END) AS media_type_live', - 'rating_key', - 'parent_rating_key', - 'grandparent_rating_key', - 'full_title', - 'title', - 'parent_title', - 'grandparent_title', - 'original_title', - 'year', - 'media_index', - 'parent_media_index', - 'thumb', - 'parent_thumb', - 'grandparent_thumb', - 'live', - 'added_at', - 'originally_available_at', - 'guid', - 'MAX((CASE WHEN (view_offset IS NULL OR view_offset = "") THEN 0.1 ELSE view_offset * 1.0 END) / \ - (CASE WHEN (duration IS NULL OR duration = "") \ - THEN 1.0 ELSE duration * 1.0 END) * 100) AS percent_complete', - 'duration', - 'NULL AS marker_credits_first', - 'NULL AS marker_credits_final', - 'transcode_decision', - 'NULL AS group_count', - 'NULL AS group_ids', - 'state', - 'session_key' + "NULL AS reference_id", + "NULL AS row_id", + "started AS date", + "started", + "stopped", + "SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE (strftime('%s', 'now') - started) END) - \ + SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration", + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter", + "view_offset", + "user_id", + "user", + "(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = '' \ + THEN user ELSE friendly_name END) AS friendly_name", + "NULL AS user_thumb", + "NULL AS custom_thumb", + "platform", + "product", + "player", + "ip_address", + "machine_id", + "location", + "secure", + "relayed", + "media_type", + "(CASE WHEN live = 1 THEN 'live' ELSE media_type END) AS media_type_live", + "rating_key", + "parent_rating_key", + "grandparent_rating_key", + "full_title", + "title", + "parent_title", + "grandparent_title", + "original_title", + "year", + "media_index", + "parent_media_index", + "thumb", + "parent_thumb", + "grandparent_thumb", + "live", + "added_at", + "originally_available_at", + "guid", + "MAX((CASE WHEN (view_offset IS NULL OR view_offset = '') THEN 0.1 ELSE view_offset * 1.0 END) / \ + (CASE WHEN (duration IS NULL OR duration = '') \ + THEN 1.0 ELSE duration * 1.0 END) * 100) AS percent_complete", + "duration", + "NULL AS marker_credits_first", + "NULL AS marker_credits_final", + "transcode_decision", + "NULL AS group_count", + "NULL AS group_ids", + "state", + "session_key" ] else: @@ -392,20 +392,20 @@ class DataFactory(object): if stat == 'top_movies': top_movies = [] try: - query = 'SELECT sh.id, shm.full_title, shm.year, sh.rating_key, shm.thumb, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN 
paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "movie" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.full_title, shm.year ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.full_title, shm.year, sh.rating_key, shm.thumb, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'movie' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.full_title, shm.year " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e) @@ -444,21 +444,21 @@ class DataFactory(object): elif stat == 'popular_movies': popular_movies = [] try: - query = 'SELECT sh.id, shm.full_title, shm.year, sh.rating_key, shm.thumb, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'COUNT(DISTINCT sh.user_id) AS users_watched, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "movie" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.full_title, shm.year ' \ - 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.full_title, shm.year, sh.rating_key, shm.thumb, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "COUNT(DISTINCT sh.user_id) AS users_watched, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'movie' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.full_title, shm.year " \ + "ORDER BY users_watched DESC, %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to 
execute database query for get_home_stats: popular_movies: %s." % e) @@ -495,22 +495,22 @@ class DataFactory(object): elif stat == 'top_tv': top_tv = [] try: - query = 'SELECT sh.id, shm.grandparent_title, sh.grandparent_rating_key, ' \ - 'shm.grandparent_thumb, sh.section_id, ' \ - 'shm.year, sh.rating_key, shm.art, sh.media_type, ' \ - 'shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "episode" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.grandparent_title ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.grandparent_title, sh.grandparent_rating_key, " \ + "shm.grandparent_thumb, sh.section_id, " \ + "shm.year, sh.rating_key, shm.art, sh.media_type, " \ + "shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'episode' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.grandparent_title " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." 
% e) @@ -549,23 +549,23 @@ class DataFactory(object): elif stat == 'popular_tv': popular_tv = [] try: - query = 'SELECT sh.id, shm.grandparent_title, sh.grandparent_rating_key, ' \ - 'shm.grandparent_thumb, sh.section_id, ' \ - 'shm.year, sh.rating_key, shm.art, sh.media_type, ' \ - 'shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'COUNT(DISTINCT sh.user_id) AS users_watched, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "episode" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.grandparent_title ' \ - 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.grandparent_title, sh.grandparent_rating_key, " \ + "shm.grandparent_thumb, sh.section_id, " \ + "shm.year, sh.rating_key, shm.art, sh.media_type, " \ + "shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "COUNT(DISTINCT sh.user_id) AS users_watched, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'episode' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.grandparent_title " \ + "ORDER BY users_watched DESC, %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." 
% e) @@ -602,21 +602,21 @@ class DataFactory(object): elif stat == 'top_music': top_music = [] try: - query = 'SELECT sh.id, shm.grandparent_title, shm.original_title, shm.year, ' \ - 'sh.grandparent_rating_key, shm.grandparent_thumb, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "track" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.original_title, shm.grandparent_title ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.grandparent_title, shm.original_title, shm.year, " \ + "sh.grandparent_rating_key, shm.grandparent_thumb, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'track' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.original_title, shm.grandparent_title " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." 
% e) @@ -655,22 +655,22 @@ class DataFactory(object): elif stat == 'popular_music': popular_music = [] try: - query = 'SELECT sh.id, shm.grandparent_title, shm.original_title, shm.year, ' \ - 'sh.grandparent_rating_key, shm.grandparent_thumb, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'COUNT(DISTINCT sh.user_id) AS users_watched, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "track" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.original_title, shm.grandparent_title ' \ - 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.grandparent_title, shm.original_title, shm.year, " \ + "sh.grandparent_rating_key, shm.grandparent_thumb, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "COUNT(DISTINCT sh.user_id) AS users_watched, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'track' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.original_title, shm.grandparent_title " \ + "ORDER BY users_watched DESC, %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." 
% e) @@ -707,28 +707,28 @@ class DataFactory(object): elif stat == 'top_libraries': top_libraries = [] try: - query = 'SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ - 'shm.media_index, shm.parent_media_index, ' \ - 'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \ - 'sh.user, sh.user_id, sh.player, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \ - 'ls.section_name, ls.section_type, ' \ - 'ls.thumb AS library_thumb, ls.custom_thumb_url AS custom_thumb, ' \ - 'ls.art AS library_art, ls.custom_art_url AS custom_art, ' \ - 'sh.started, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'LEFT OUTER JOIN (SELECT * FROM library_sections WHERE deleted_section = 0) ' \ - ' AS ls ON sh.section_id = ls.section_id ' \ - 'GROUP BY sh.section_id ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, " \ + "shm.media_index, shm.parent_media_index, " \ + "sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, " \ + "sh.user, sh.user_id, sh.player, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, " \ + "ls.section_name, ls.section_type, " \ + "ls.thumb AS library_thumb, ls.custom_thumb_url AS custom_thumb, " \ + "ls.art AS library_art, ls.custom_art_url AS custom_art, " \ + "sh.started, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "LEFT OUTER JOIN (SELECT * FROM library_sections WHERE deleted_section = 0) " \ + " AS ls ON sh.section_id = ls.section_id " \ + "GROUP BY sh.section_id " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_libraries: %s." 
% e) @@ -795,28 +795,28 @@ class DataFactory(object): elif stat == 'top_users': top_users = [] try: - query = 'SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ - 'shm.media_index, shm.parent_media_index, ' \ - 'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \ - 'sh.user, sh.user_id, sh.player, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \ - 'u.thumb AS user_thumb, u.custom_avatar_url AS custom_thumb, ' \ - 'sh.started, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ""' \ - ' THEN u.username ELSE u.friendly_name END) ' \ - ' AS friendly_name, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \ - 'GROUP BY sh.user_id ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, " \ + "shm.media_index, shm.parent_media_index, " \ + "sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, " \ + "sh.user, sh.user_id, sh.player, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, " \ + "u.thumb AS user_thumb, u.custom_avatar_url AS custom_thumb, " \ + "sh.started, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ''" \ + " THEN u.username ELSE u.friendly_name END) " \ + " AS friendly_name, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id " \ + "GROUP BY sh.user_id " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." 
% e) @@ -873,17 +873,17 @@ class DataFactory(object): top_platform = [] try: - query = 'SELECT sh.platform, sh.started, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.platform, sh.started, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "GROUP BY sh.platform " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e) @@ -924,65 +924,65 @@ class DataFactory(object): if plexpy.CONFIG.WATCHED_MARKER == 1: watched_threshold = ( - '(CASE WHEN shm.marker_credits_final IS NULL ' - 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' - 'ELSE shm.marker_credits_final END) ' - 'AS watched_threshold' + "(CASE WHEN shm.marker_credits_final IS NULL " + "THEN sh._duration * (CASE WHEN sh.media_type = 'movie' THEN %d ELSE %d END) / 100.0 " + "ELSE shm.marker_credits_final END) " + "AS watched_threshold" ) % (movie_watched_percent, tv_watched_percent) - watched_where = '_view_offset >= watched_threshold' + watched_where = "_view_offset >= watched_threshold" elif plexpy.CONFIG.WATCHED_MARKER == 2: watched_threshold = ( - '(CASE WHEN shm.marker_credits_first IS NULL ' - 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' - 'ELSE shm.marker_credits_first END) ' - 'AS watched_threshold' + "(CASE WHEN shm.marker_credits_first IS NULL " + "THEN sh._duration * (CASE WHEN sh.media_type = 'movie' THEN %d ELSE %d END) / 100.0 " + "ELSE shm.marker_credits_first END) " + "AS watched_threshold" ) % (movie_watched_percent, tv_watched_percent) - watched_where = '_view_offset >= watched_threshold' + watched_where = "_view_offset >= watched_threshold" elif plexpy.CONFIG.WATCHED_MARKER == 3: watched_threshold = ( - 'MIN(' - '(CASE WHEN shm.marker_credits_first IS NULL ' - 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' - 'ELSE shm.marker_credits_first END), ' - 'sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0) ' - 'AS watched_threshold' + "MIN(" + "(CASE WHEN shm.marker_credits_first IS NULL " + "THEN sh._duration * (CASE WHEN sh.media_type = 'movie' THEN %d ELSE %d END) / 100.0 " + "ELSE shm.marker_credits_first END), " + "sh._duration * (CASE WHEN sh.media_type = 'movie' THEN %d ELSE %d END) / 100.0) " + "AS watched_threshold" ) % (movie_watched_percent, tv_watched_percent, movie_watched_percent, tv_watched_percent) - watched_where = '_view_offset >= watched_threshold' + watched_where = "_view_offset >= 
watched_threshold" else: - watched_threshold = 'NULL AS watched_threshold' + watched_threshold = "NULL AS watched_threshold" watched_where = ( - 'sh.media_type == "movie" AND percent_complete >= %d ' - 'OR sh.media_type == "episode" AND percent_complete >= %d' + "sh.media_type == 'movie' AND percent_complete >= %d " + "OR sh.media_type == 'episode' AND percent_complete >= %d" ) % (movie_watched_percent, tv_watched_percent) last_watched = [] try: - query = 'SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ - 'shm.media_index, shm.parent_media_index, ' \ - 'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \ - 'sh.user, sh.user_id, u.custom_avatar_url as user_thumb, sh.player, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ""' \ - ' THEN u.username ELSE u.friendly_name END) ' \ - ' AS friendly_name, ' \ - 'MAX(sh.started) AS last_watch, sh._view_offset, sh._duration, ' \ - '(sh._view_offset / sh._duration * 100) AS percent_complete, ' \ - '%s ' \ - 'FROM (SELECT *, MAX(session_history.id), ' \ - ' (CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) AS _view_offset, ' \ - ' (CASE WHEN duration IS NULL THEN 1.0 ELSE duration * 1.0 END) AS _duration ' \ - ' FROM session_history ' \ - ' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND (session_history.media_type = "movie" ' \ - ' OR session_history.media_type = "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \ - 'WHERE %s ' \ - 'GROUP BY sh.id ' \ - 'ORDER BY last_watch DESC ' \ - 'LIMIT %s OFFSET %s' % (watched_threshold, + query = "SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, " \ + "shm.media_index, shm.parent_media_index, " \ + "sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, " \ + "sh.user, sh.user_id, u.custom_avatar_url as user_thumb, sh.player, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ''" \ + " THEN u.username ELSE u.friendly_name END) " \ + " AS friendly_name, " \ + "MAX(sh.started) AS last_watch, sh._view_offset, sh._duration, " \ + "(sh._view_offset / sh._duration * 100) AS percent_complete, " \ + "%s " \ + "FROM (SELECT *, MAX(session_history.id), " \ + " (CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) AS _view_offset, " \ + " (CASE WHEN duration IS NULL THEN 1.0 ELSE duration * 1.0 END) AS _duration " \ + " FROM session_history " \ + " JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + " WHERE session_history.stopped >= %s " \ + " AND (session_history.media_type = 'movie' " \ + " OR session_history.media_type = 'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id " \ + "WHERE %s " \ + "GROUP BY sh.id " \ + "ORDER BY last_watch DESC " \ + "LIMIT %s OFFSET %s" % (watched_threshold, timestamp, where_id, group_by, watched_where, stats_count, stats_start) result = monitor_db.select(query) @@ -1068,10 +1068,10 @@ class DataFactory(object): most_concurrent = [] try: - base_query = 'SELECT sh.started, 
sh.stopped ' \ - 'FROM session_history AS sh ' \ - 'JOIN session_history_media_info AS shmi ON sh.id = shmi.id ' \ - 'WHERE sh.stopped >= %s ' % timestamp + base_query = "SELECT sh.started, sh.stopped " \ + "FROM session_history AS sh " \ + "JOIN session_history_media_info AS shmi ON sh.id = shmi.id " \ + "WHERE sh.stopped >= %s " % timestamp title = 'Concurrent Streams' query = base_query @@ -1081,21 +1081,21 @@ class DataFactory(object): title = 'Concurrent Transcodes' query = base_query \ - + 'AND shmi.transcode_decision = "transcode" ' + + "AND shmi.transcode_decision = 'transcode' " result = monitor_db.select(query) if result: most_concurrent.append(calc_most_concurrent(title, result)) title = 'Concurrent Direct Streams' query = base_query \ - + 'AND shmi.transcode_decision = "copy" ' + + "AND shmi.transcode_decision = 'copy' " result = monitor_db.select(query) if result: most_concurrent.append(calc_most_concurrent(title, result)) title = 'Concurrent Direct Plays' query = base_query \ - + 'AND shmi.transcode_decision = "direct play" ' + + "AND shmi.transcode_decision = 'direct play' " result = monitor_db.select(query) if result: most_concurrent.append(calc_most_concurrent(title, result)) @@ -1120,21 +1120,21 @@ class DataFactory(object): library_stats = [] try: - query = 'SELECT ls.id, ls.section_id, ls.section_name, ls.section_type, ls.thumb AS library_thumb, ' \ - 'ls.custom_thumb_url AS custom_thumb, ls.art AS library_art, ls.custom_art_url AS custom_art, ' \ - 'ls.count, ls.parent_count, ls.child_count, ' \ - 'sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ - 'shm.media_index, shm.parent_media_index, ' \ - 'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \ - 'sh.user, sh.user_id, sh.player, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \ - 'MAX(sh.started) AS last_watch ' \ - 'FROM library_sections AS ls ' \ - 'LEFT OUTER JOIN session_history AS sh ON ls.section_id = sh.section_id ' \ - 'LEFT OUTER JOIN session_history_metadata AS shm ON sh.id = shm.id ' \ - 'WHERE ls.section_id IN (%s) AND ls.deleted_section = 0 ' \ - 'GROUP BY ls.id ' \ - 'ORDER BY ls.section_type, ls.count DESC, ls.parent_count DESC, ls.child_count DESC ' % ','.join(library_cards) + query = "SELECT ls.id, ls.section_id, ls.section_name, ls.section_type, ls.thumb AS library_thumb, " \ + "ls.custom_thumb_url AS custom_thumb, ls.art AS library_art, ls.custom_art_url AS custom_art, " \ + "ls.count, ls.parent_count, ls.child_count, " \ + "sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, " \ + "shm.media_index, shm.parent_media_index, " \ + "sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, " \ + "sh.user, sh.user_id, sh.player, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, " \ + "MAX(sh.started) AS last_watch " \ + "FROM library_sections AS ls " \ + "LEFT OUTER JOIN session_history AS sh ON ls.section_id = sh.section_id " \ + "LEFT OUTER JOIN session_history_metadata AS shm ON sh.id = shm.id " \ + "WHERE ls.section_id IN (%s) AND ls.deleted_section = 0 " \ + "GROUP BY ls.id " \ + "ORDER BY ls.section_type, ls.count DESC, ls.parent_count DESC, ls.child_count DESC " % ",".join(library_cards) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_library_stats: %s." 
% e) @@ -1228,15 +1228,15 @@ class DataFactory(object): try: if days > 0: if str(rating_key).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays, section_id ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'WHERE stopped >= ? ' \ - 'AND (session_history.grandparent_rating_key IN (%s) ' \ - 'OR session_history.parent_rating_key IN (%s) ' \ - 'OR session_history.rating_key IN (%s))' % ( + query = "SELECT (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays, section_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "WHERE stopped >= ? " \ + "AND (session_history.grandparent_rating_key IN (%s) " \ + "OR session_history.parent_rating_key IN (%s) " \ + "OR session_history.rating_key IN (%s))" % ( group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg ) @@ -1245,14 +1245,14 @@ class DataFactory(object): result = [] else: if str(rating_key).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays, section_id ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'WHERE (session_history.grandparent_rating_key IN (%s) ' \ - 'OR session_history.parent_rating_key IN (%s) ' \ - 'OR session_history.rating_key IN (%s))' % ( + query = "SELECT (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays, section_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "WHERE (session_history.grandparent_rating_key IN (%s) " \ + "OR session_history.parent_rating_key IN (%s) " \ + "OR session_history.rating_key IN (%s))" % ( group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg ) @@ -1308,20 +1308,20 @@ class DataFactory(object): try: if str(rating_key).isdigit(): - query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \ - 'THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \ - 'users.user_id, users.username, users.thumb, users.custom_avatar_url AS custom_thumb, ' \ - 'COUNT(DISTINCT %s) AS total_plays, (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'section_id ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'JOIN users ON users.user_id = session_history.user_id ' \ - 'WHERE (session_history.grandparent_rating_key IN (%s) ' \ - 'OR session_history.parent_rating_key IN (%s) ' \ - 'OR session_history.rating_key IN (%s)) ' \ - 'GROUP BY users.user_id ' \ - 'ORDER BY total_plays DESC, total_time DESC' % ( + query = "SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' " \ + "THEN users.username ELSE users.friendly_name END) AS friendly_name, " \ + "users.user_id, users.username, users.thumb, users.custom_avatar_url AS custom_thumb, " \ + "COUNT(DISTINCT %s) AS total_plays, (SUM(stopped - started) - " \ + "SUM(CASE WHEN 
paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "section_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "JOIN users ON users.user_id = session_history.user_id " \ + "WHERE (session_history.grandparent_rating_key IN (%s) " \ + "OR session_history.parent_rating_key IN (%s) " \ + "OR session_history.rating_key IN (%s)) " \ + "GROUP BY users.user_id " \ + "ORDER BY total_plays DESC, total_time DESC" % ( group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg ) @@ -1362,53 +1362,53 @@ class DataFactory(object): user_cond = '' table = 'session_history' if row_id else 'sessions' if session.get_session_user_id(): - user_cond = 'AND %s.user_id = %s ' % (table, session.get_session_user_id()) + user_cond = "AND %s.user_id = %s " % (table, session.get_session_user_id()) if row_id: - query = 'SELECT bitrate, video_full_resolution, ' \ - 'optimized_version, optimized_version_profile, optimized_version_title, ' \ - 'synced_version, synced_version_profile, ' \ - 'container, video_codec, video_bitrate, video_width, video_height, video_framerate, ' \ - 'video_dynamic_range, aspect_ratio, ' \ - 'audio_codec, audio_bitrate, audio_channels, audio_language, audio_language_code, ' \ - 'subtitle_codec, subtitle_forced, subtitle_language, ' \ - 'stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, ' \ - 'stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, ' \ - 'stream_video_framerate, stream_video_dynamic_range, ' \ - 'stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, ' \ - 'stream_audio_language, stream_audio_language_code, ' \ - 'subtitles, stream_subtitle_decision, stream_subtitle_codec, stream_subtitle_forced, stream_subtitle_language, ' \ - 'transcode_hw_decoding, transcode_hw_encoding, ' \ - 'video_decision, audio_decision, transcode_decision, width, height, container, ' \ - 'transcode_container, transcode_video_codec, transcode_audio_codec, transcode_audio_channels, ' \ - 'transcode_width, transcode_height, ' \ - 'session_history_metadata.media_type, title, grandparent_title, original_title ' \ - 'FROM session_history_media_info ' \ - 'JOIN session_history ON session_history_media_info.id = session_history.id ' \ - 'JOIN session_history_metadata ON session_history_media_info.id = session_history_metadata.id ' \ - 'WHERE session_history_media_info.id = ? 
%s' % user_cond + query = "SELECT bitrate, video_full_resolution, " \ + "optimized_version, optimized_version_profile, optimized_version_title, " \ + "synced_version, synced_version_profile, " \ + "container, video_codec, video_bitrate, video_width, video_height, video_framerate, " \ + "video_dynamic_range, aspect_ratio, " \ + "audio_codec, audio_bitrate, audio_channels, audio_language, audio_language_code, " \ + "subtitle_codec, subtitle_forced, subtitle_language, " \ + "stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, " \ + "stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, " \ + "stream_video_framerate, stream_video_dynamic_range, " \ + "stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, " \ + "stream_audio_language, stream_audio_language_code, " \ + "subtitles, stream_subtitle_decision, stream_subtitle_codec, stream_subtitle_forced, stream_subtitle_language, " \ + "transcode_hw_decoding, transcode_hw_encoding, " \ + "video_decision, audio_decision, transcode_decision, width, height, container, " \ + "transcode_container, transcode_video_codec, transcode_audio_codec, transcode_audio_channels, " \ + "transcode_width, transcode_height, " \ + "session_history_metadata.media_type, title, grandparent_title, original_title " \ + "FROM session_history_media_info " \ + "JOIN session_history ON session_history_media_info.id = session_history.id " \ + "JOIN session_history_metadata ON session_history_media_info.id = session_history_metadata.id " \ + "WHERE session_history_media_info.id = ? %s" % user_cond result = monitor_db.select(query, args=[row_id]) elif session_key: - query = 'SELECT bitrate, video_full_resolution, ' \ - 'optimized_version, optimized_version_profile, optimized_version_title, ' \ - 'synced_version, synced_version_profile, ' \ - 'container, video_codec, video_bitrate, video_width, video_height, video_framerate, ' \ - 'video_dynamic_range, aspect_ratio, ' \ - 'audio_codec, audio_bitrate, audio_channels, audio_language, audio_language_code, ' \ - 'subtitle_codec, subtitle_forced, subtitle_language, ' \ - 'stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, ' \ - 'stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, ' \ - 'stream_video_framerate, stream_video_dynamic_range, ' \ - 'stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, ' \ - 'stream_audio_language, stream_audio_language_code, ' \ - 'subtitles, stream_subtitle_decision, stream_subtitle_codec, stream_subtitle_forced, stream_subtitle_language, ' \ - 'transcode_hw_decoding, transcode_hw_encoding, ' \ - 'video_decision, audio_decision, transcode_decision, width, height, container, ' \ - 'transcode_container, transcode_video_codec, transcode_audio_codec, transcode_audio_channels, ' \ - 'transcode_width, transcode_height, ' \ - 'media_type, title, grandparent_title, original_title ' \ - 'FROM sessions ' \ - 'WHERE session_key = ? 
%s' % user_cond + query = "SELECT bitrate, video_full_resolution, " \ + "optimized_version, optimized_version_profile, optimized_version_title, " \ + "synced_version, synced_version_profile, " \ + "container, video_codec, video_bitrate, video_width, video_height, video_framerate, " \ + "video_dynamic_range, aspect_ratio, " \ + "audio_codec, audio_bitrate, audio_channels, audio_language, audio_language_code, " \ + "subtitle_codec, subtitle_forced, subtitle_language, " \ + "stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, " \ + "stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, " \ + "stream_video_framerate, stream_video_dynamic_range, " \ + "stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, " \ + "stream_audio_language, stream_audio_language_code, " \ + "subtitles, stream_subtitle_decision, stream_subtitle_codec, stream_subtitle_forced, stream_subtitle_language, " \ + "transcode_hw_decoding, transcode_hw_encoding, " \ + "video_decision, audio_decision, transcode_decision, width, height, container, " \ + "transcode_container, transcode_video_codec, transcode_audio_codec, transcode_audio_channels, " \ + "transcode_width, transcode_height, " \ + "media_type, title, grandparent_title, original_title " \ + "FROM sessions " \ + "WHERE session_key = ? %s" % user_cond result = monitor_db.select(query, args=[session_key]) else: return None @@ -1499,43 +1499,43 @@ class DataFactory(object): if rating_key or guid: if guid: - where = 'session_history_metadata.guid LIKE ?' + where = "session_history_metadata.guid LIKE ?" args = [guid.split('?')[0] + '%'] # SQLite LIKE wildcard else: - where = 'session_history_metadata.rating_key = ?' + where = "session_history_metadata.rating_key = ?" 
args = [rating_key] - query = 'SELECT session_history.section_id, session_history_metadata.id, ' \ - 'session_history_metadata.rating_key, session_history_metadata.parent_rating_key, ' \ - 'session_history_metadata.grandparent_rating_key, session_history_metadata.title, ' \ - 'session_history_metadata.parent_title, session_history_metadata.grandparent_title, ' \ - 'session_history_metadata.original_title, session_history_metadata.full_title, ' \ - 'library_sections.section_name, ' \ - 'session_history_metadata.media_index, session_history_metadata.parent_media_index, ' \ - 'session_history_metadata.thumb, ' \ - 'session_history_metadata.parent_thumb, session_history_metadata.grandparent_thumb, ' \ - 'session_history_metadata.art, session_history_metadata.media_type, session_history_metadata.year, ' \ - 'session_history_metadata.originally_available_at, session_history_metadata.added_at, ' \ - 'session_history_metadata.updated_at, session_history_metadata.last_viewed_at, ' \ - 'session_history_metadata.content_rating, session_history_metadata.summary, ' \ - 'session_history_metadata.tagline, session_history_metadata.rating, session_history_metadata.duration, ' \ - 'session_history_metadata.guid, session_history_metadata.directors, session_history_metadata.writers, ' \ - 'session_history_metadata.actors, session_history_metadata.genres, session_history_metadata.studio, ' \ - 'session_history_metadata.labels, ' \ - 'session_history_media_info.container, session_history_media_info.bitrate, ' \ - 'session_history_media_info.video_codec, session_history_media_info.video_resolution, ' \ - 'session_history_media_info.video_full_resolution, ' \ - 'session_history_media_info.video_framerate, session_history_media_info.audio_codec, ' \ - 'session_history_media_info.audio_channels, session_history_metadata.live, ' \ - 'session_history_metadata.channel_call_sign, session_history_metadata.channel_identifier, ' \ - 'session_history_metadata.channel_thumb ' \ - 'FROM session_history_metadata ' \ - 'JOIN library_sections ON session_history.section_id = library_sections.section_id ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'JOIN session_history_media_info ON session_history_metadata.id = session_history_media_info.id ' \ - 'WHERE %s ' \ - 'ORDER BY session_history_metadata.id DESC ' \ - 'LIMIT 1' % where + query = "SELECT session_history.section_id, session_history_metadata.id, " \ + "session_history_metadata.rating_key, session_history_metadata.parent_rating_key, " \ + "session_history_metadata.grandparent_rating_key, session_history_metadata.title, " \ + "session_history_metadata.parent_title, session_history_metadata.grandparent_title, " \ + "session_history_metadata.original_title, session_history_metadata.full_title, " \ + "library_sections.section_name, " \ + "session_history_metadata.media_index, session_history_metadata.parent_media_index, " \ + "session_history_metadata.thumb, " \ + "session_history_metadata.parent_thumb, session_history_metadata.grandparent_thumb, " \ + "session_history_metadata.art, session_history_metadata.media_type, session_history_metadata.year, " \ + "session_history_metadata.originally_available_at, session_history_metadata.added_at, " \ + "session_history_metadata.updated_at, session_history_metadata.last_viewed_at, " \ + "session_history_metadata.content_rating, session_history_metadata.summary, " \ + "session_history_metadata.tagline, session_history_metadata.rating, session_history_metadata.duration, " \ + 
"session_history_metadata.guid, session_history_metadata.directors, session_history_metadata.writers, " \ + "session_history_metadata.actors, session_history_metadata.genres, session_history_metadata.studio, " \ + "session_history_metadata.labels, " \ + "session_history_media_info.container, session_history_media_info.bitrate, " \ + "session_history_media_info.video_codec, session_history_media_info.video_resolution, " \ + "session_history_media_info.video_full_resolution, " \ + "session_history_media_info.video_framerate, session_history_media_info.audio_codec, " \ + "session_history_media_info.audio_channels, session_history_metadata.live, " \ + "session_history_metadata.channel_call_sign, session_history_metadata.channel_identifier, " \ + "session_history_metadata.channel_thumb " \ + "FROM session_history_metadata " \ + "JOIN library_sections ON session_history.section_id = library_sections.section_id " \ + "JOIN session_history ON session_history_metadata.id = session_history.id " \ + "JOIN session_history_media_info ON session_history_metadata.id = session_history_media_info.id " \ + "WHERE %s " \ + "ORDER BY session_history_metadata.id DESC " \ + "LIMIT 1" % where result = monitor_db.select(query=query, args=args) else: result = [] @@ -1614,14 +1614,14 @@ class DataFactory(object): where, args = datatables.build_custom_where(custom_where=custom_where) try: - query = 'SELECT SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS total_duration, ' \ - '(CASE WHEN session_history_metadata.live = 1 THEN "live" ELSE session_history.media_type END) ' \ - 'AS media_type_live ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'JOIN session_history_media_info ON session_history_media_info.id = session_history.id ' \ - '%s ' % where + query = "SELECT SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS total_duration, " \ + "(CASE WHEN session_history_metadata.live = 1 THEN 'live' ELSE session_history.media_type END) " \ + "AS media_type_live " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "JOIN session_history_media_info ON session_history_media_info.id = session_history.id " \ + "%s " % where result = monitor_db.select(query, args=args) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_total_duration: %s." % e) @@ -1644,7 +1644,7 @@ class DataFactory(object): if session_key: try: - query = 'SELECT ip_address FROM sessions WHERE session_key = %d %s' % (int(session_key), user_cond) + query = "SELECT ip_address FROM sessions WHERE session_key = %d %s" % (int(session_key), user_cond) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_session_ip: %s." % e) @@ -1694,19 +1694,19 @@ class DataFactory(object): where = '' if where_params: - where = 'WHERE ' + ' AND '.join([w + ' = ?' for w in where_params]) + where = "WHERE " + " AND ".join([w + " = ?" 
for w in where_params]) if order_by: - order_by = 'ORDER BY ' + order_by + ' DESC' + order_by = "ORDER BY " + order_by + " DESC" if service == 'imgur': - query = 'SELECT imgur_title AS img_title, imgur_url AS img_url FROM imgur_lookup ' \ - 'JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash ' \ - '%s %s' % (where, order_by) + query = "SELECT imgur_title AS img_title, imgur_url AS img_url FROM imgur_lookup " \ + "JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash " \ + "%s %s" % (where, order_by) elif service == 'cloudinary': - query = 'SELECT cloudinary_title AS img_title, cloudinary_url AS img_url FROM cloudinary_lookup ' \ - 'JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash ' \ - '%s %s' % (where, order_by) + query = "SELECT cloudinary_title AS img_title, cloudinary_url AS img_url FROM cloudinary_lookup " \ + "JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash " \ + "%s %s" % (where, order_by) else: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_img_info: " "service not provided.") @@ -1754,14 +1754,14 @@ class DataFactory(object): args = [] log_msg = '' if rating_key: - where = 'WHERE rating_key = ?' + where = "WHERE rating_key = ?" args = [rating_key] log_msg = ' for rating_key %s' % rating_key if service.lower() == 'imgur': # Delete from Imgur - query = 'SELECT imgur_title, delete_hash, fallback FROM imgur_lookup ' \ - 'JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash %s' % where + query = "SELECT imgur_title, delete_hash, fallback FROM imgur_lookup " \ + "JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash %s" % where results = monitor_db.select(query, args=args) for imgur_info in results: @@ -1772,15 +1772,15 @@ class DataFactory(object): logger.info("Tautulli DataFactory :: Deleting Imgur info%s from the database." % log_msg) - result = monitor_db.action('DELETE FROM imgur_lookup WHERE img_hash ' - 'IN (SELECT img_hash FROM image_hash_lookup %s)' % where, + result = monitor_db.action("DELETE FROM imgur_lookup WHERE img_hash " + "IN (SELECT img_hash FROM image_hash_lookup %s)" % where, args) elif service.lower() == 'cloudinary': # Delete from Cloudinary - query = 'SELECT cloudinary_title, rating_key, fallback FROM cloudinary_lookup ' \ - 'JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash %s ' \ - 'GROUP BY rating_key' % where + query = "SELECT cloudinary_title, rating_key, fallback FROM cloudinary_lookup " \ + "JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash %s " \ + "GROUP BY rating_key" % where results = monitor_db.select(query, args=args) if delete_all: @@ -1791,8 +1791,8 @@ class DataFactory(object): logger.info("Tautulli DataFactory :: Deleting Cloudinary info%s from the database." % log_msg) - result = monitor_db.action('DELETE FROM cloudinary_lookup WHERE img_hash ' - 'IN (SELECT img_hash FROM image_hash_lookup %s)' % where, + result = monitor_db.action("DELETE FROM cloudinary_lookup WHERE img_hash " + "IN (SELECT img_hash FROM image_hash_lookup %s)" % where, args) else: @@ -1883,15 +1883,15 @@ class DataFactory(object): if rating_key: logger.info("Tautulli DataFactory :: Deleting lookup info for rating_key %s from the database." 
% rating_key) - result_themoviedb = monitor_db.action('DELETE FROM themoviedb_lookup WHERE rating_key = ?', [rating_key]) - result_tvmaze = monitor_db.action('DELETE FROM tvmaze_lookup WHERE rating_key = ?', [rating_key]) - result_musicbrainz = monitor_db.action('DELETE FROM musicbrainz_lookup WHERE rating_key = ?', [rating_key]) + result_themoviedb = monitor_db.action("DELETE FROM themoviedb_lookup WHERE rating_key = ?", [rating_key]) + result_tvmaze = monitor_db.action("DELETE FROM tvmaze_lookup WHERE rating_key = ?", [rating_key]) + result_musicbrainz = monitor_db.action("DELETE FROM musicbrainz_lookup WHERE rating_key = ?", [rating_key]) return bool(result_themoviedb or result_tvmaze or result_musicbrainz) elif service and delete_all: if service.lower() in ('themoviedb', 'tvmaze', 'musicbrainz'): logger.info("Tautulli DataFactory :: Deleting all lookup info for '%s' from the database." % service) - result = monitor_db.action('DELETE FROM %s_lookup' % service.lower()) + result = monitor_db.action("DELETE FROM %s_lookup" % service.lower()) return bool(result) else: logger.error("Tautulli DataFactory :: Unable to delete lookup info: invalid service '%s' provided." @@ -1901,13 +1901,13 @@ class DataFactory(object): monitor_db = database.MonitorDatabase() if rating_key: - query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \ - 'media_index, parent_media_index, year, media_type ' \ - 'FROM session_history_metadata ' \ - 'WHERE rating_key = ? ' \ - 'OR parent_rating_key = ? ' \ - 'OR grandparent_rating_key = ? ' \ - 'LIMIT 1' + query = "SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, " \ + "media_index, parent_media_index, year, media_type " \ + "FROM session_history_metadata " \ + "WHERE rating_key = ? " \ + "OR parent_rating_key = ? " \ + "OR grandparent_rating_key = ? " \ + "LIMIT 1" result = monitor_db.select(query=query, args=[rating_key, rating_key, rating_key]) else: result = [] @@ -1974,12 +1974,12 @@ class DataFactory(object): # Get the grandparent rating key try: - query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key ' \ - 'FROM session_history_metadata ' \ - 'WHERE rating_key = ? ' \ - 'OR parent_rating_key = ? ' \ - 'OR grandparent_rating_key = ? ' \ - 'LIMIT 1' + query = "SELECT rating_key, parent_rating_key, grandparent_rating_key " \ + "FROM session_history_metadata " \ + "WHERE rating_key = ? " \ + "OR parent_rating_key = ? " \ + "OR grandparent_rating_key = ? " \ + "LIMIT 1" result = monitor_db.select(query=query, args=[rating_key, rating_key, rating_key]) grandparent_rating_key = result[0]['grandparent_rating_key'] @@ -1988,12 +1988,12 @@ class DataFactory(object): logger.warn("Tautulli DataFactory :: Unable to execute database query for get_rating_keys_list: %s." % e) return {} - query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \ - 'media_index, parent_media_index ' \ - 'FROM session_history_metadata ' \ - 'WHERE {0} = ? ' \ - 'GROUP BY {1} ' \ - 'ORDER BY {1} DESC ' + query = "SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, " \ + "media_index, parent_media_index " \ + "FROM session_history_metadata " \ + "WHERE {0} = ? 
" \ + "GROUP BY {1} " \ + "ORDER BY {1} DESC " # get grandparent_rating_keys grandparents = {} @@ -2070,13 +2070,13 @@ class DataFactory(object): if metadata['media_type'] == 'show' or metadata['media_type'] == 'artist': # check grandparent_rating_key (2 tables) query = ( - 'SELECT id FROM session_history ' - 'WHERE grandparent_rating_key = ? ' + "SELECT id FROM session_history " + "WHERE grandparent_rating_key = ? " ) args = [old_key] if _UPDATE_METADATA_IDS['grandparent_rating_key_ids']: - query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['grandparent_rating_key_ids']) + query += "AND id NOT IN (%s)" % ",".join(_UPDATE_METADATA_IDS['grandparent_rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -2085,26 +2085,26 @@ class DataFactory(object): continue monitor_db.action( - 'UPDATE session_history SET grandparent_rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history SET grandparent_rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) monitor_db.action( - 'UPDATE session_history_metadata SET grandparent_rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history_metadata SET grandparent_rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) elif metadata['media_type'] == 'season' or metadata['media_type'] == 'album': # check parent_rating_key (2 tables) query = ( - 'SELECT id FROM session_history ' - 'WHERE parent_rating_key = ? ' + "SELECT id FROM session_history " + "WHERE parent_rating_key = ? " ) args = [old_key] if _UPDATE_METADATA_IDS['parent_rating_key_ids']: - query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['parent_rating_key_ids']) + query += "AND id NOT IN (%s)" % ",".join(_UPDATE_METADATA_IDS['parent_rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -2113,26 +2113,26 @@ class DataFactory(object): continue monitor_db.action( - 'UPDATE session_history SET parent_rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history SET parent_rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) monitor_db.action( - 'UPDATE session_history_metadata SET parent_rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history_metadata SET parent_rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) else: # check rating_key (2 tables) query = ( - 'SELECT id FROM session_history ' - 'WHERE rating_key = ? ' + "SELECT id FROM session_history " + "WHERE rating_key = ? " ) args = [old_key] if _UPDATE_METADATA_IDS['rating_key_ids']: - query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['rating_key_ids']) + query += "AND id NOT IN (%s)" % ",".join(_UPDATE_METADATA_IDS['rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -2141,13 +2141,13 @@ class DataFactory(object): continue monitor_db.action( - 'UPDATE session_history SET rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history SET rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) monitor_db.action( - 'UPDATE session_history_media_info SET rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history_media_info SET rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) @@ -2181,21 +2181,21 @@ class DataFactory(object): monitor_db = database.MonitorDatabase() - query = 'UPDATE session_history SET section_id = ? 
' \ - 'WHERE id IN (%s)' % ','.join(ids) + query = "UPDATE session_history SET section_id = ? " \ + "WHERE id IN (%s)" % ",".join(ids) args = [metadata['section_id']] monitor_db.action(query=query, args=args) # Update the session_history_metadata table - query = 'UPDATE session_history_metadata SET rating_key = ?, parent_rating_key = ?, ' \ - 'grandparent_rating_key = ?, title = ?, parent_title = ?, grandparent_title = ?, ' \ - 'original_title = ?, full_title = ?, ' \ - 'media_index = ?, parent_media_index = ?, thumb = ?, parent_thumb = ?, ' \ - 'grandparent_thumb = ?, art = ?, media_type = ?, year = ?, originally_available_at = ?, ' \ - 'added_at = ?, updated_at = ?, last_viewed_at = ?, content_rating = ?, summary = ?, ' \ - 'tagline = ?, rating = ?, duration = ?, guid = ?, directors = ?, writers = ?, actors = ?, ' \ - 'genres = ?, studio = ?, labels = ? ' \ - 'WHERE id IN (%s)' % ','.join(ids) + query = "UPDATE session_history_metadata SET rating_key = ?, parent_rating_key = ?, " \ + "grandparent_rating_key = ?, title = ?, parent_title = ?, grandparent_title = ?, " \ + "original_title = ?, full_title = ?, " \ + "media_index = ?, parent_media_index = ?, thumb = ?, parent_thumb = ?, " \ + "grandparent_thumb = ?, art = ?, media_type = ?, year = ?, originally_available_at = ?, " \ + "added_at = ?, updated_at = ?, last_viewed_at = ?, content_rating = ?, summary = ?, " \ + "tagline = ?, rating = ?, duration = ?, guid = ?, directors = ?, writers = ?, actors = ?, " \ + "genres = ?, studio = ?, labels = ? " \ + "WHERE id IN (%s)" % ",".join(ids) args = [metadata['rating_key'], metadata['parent_rating_key'], metadata['grandparent_rating_key'], metadata['title'], metadata['parent_title'], metadata['grandparent_title'], @@ -2212,19 +2212,19 @@ class DataFactory(object): def get_notification_log(self, kwargs=None): data_tables = datatables.DataTables() - columns = ['notify_log.id', - 'notify_log.timestamp', - 'notify_log.session_key', - 'notify_log.rating_key', - 'notify_log.user_id', - 'notify_log.user', - 'notify_log.notifier_id', - 'notify_log.agent_id', - 'notify_log.agent_name', - 'notify_log.notify_action', - 'notify_log.subject_text', - 'notify_log.body_text', - 'notify_log.success' + columns = ["notify_log.id", + "notify_log.timestamp", + "notify_log.session_key", + "notify_log.rating_key", + "notify_log.user_id", + "notify_log.user", + "notify_log.notifier_id", + "notify_log.agent_id", + "notify_log.agent_name", + "notify_log.notify_action", + "notify_log.subject_text", + "notify_log.body_text", + "notify_log.success" ] try: query = data_tables.ssp_query(table_name='notify_log', @@ -2281,8 +2281,8 @@ class DataFactory(object): try: logger.info("Tautulli DataFactory :: Clearing notification logs from database.") - monitor_db.action('DELETE FROM notify_log') - monitor_db.action('VACUUM') + monitor_db.action("DELETE FROM notify_log") + monitor_db.action("VACUUM") return True except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for delete_notification_log: %s." 
% e) @@ -2291,18 +2291,18 @@ class DataFactory(object): def get_newsletter_log(self, kwargs=None): data_tables = datatables.DataTables() - columns = ['newsletter_log.id', - 'newsletter_log.timestamp', - 'newsletter_log.newsletter_id', - 'newsletter_log.agent_id', - 'newsletter_log.agent_name', - 'newsletter_log.notify_action', - 'newsletter_log.subject_text', - 'newsletter_log.body_text', - 'newsletter_log.start_date', - 'newsletter_log.end_date', - 'newsletter_log.uuid', - 'newsletter_log.success' + columns = ["newsletter_log.id", + "newsletter_log.timestamp", + "newsletter_log.newsletter_id", + "newsletter_log.agent_id", + "newsletter_log.agent_name", + "newsletter_log.notify_action", + "newsletter_log.subject_text", + "newsletter_log.body_text", + "newsletter_log.start_date", + "newsletter_log.end_date", + "newsletter_log.uuid", + "newsletter_log.success" ] try: query = data_tables.ssp_query(table_name='newsletter_log', @@ -2353,8 +2353,8 @@ class DataFactory(object): try: logger.info("Tautulli DataFactory :: Clearing newsletter logs from database.") - monitor_db.action('DELETE FROM newsletter_log') - monitor_db.action('VACUUM') + monitor_db.action("DELETE FROM newsletter_log") + monitor_db.action("VACUUM") return True except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for delete_newsletter_log: %s." % e) @@ -2365,15 +2365,15 @@ class DataFactory(object): if user_id: if history_only: - query = 'SELECT machine_id FROM session_history ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY machine_id' + query = "SELECT machine_id FROM session_history " \ + "WHERE user_id = ? " \ + "GROUP BY machine_id" else: - query = 'SELECT * FROM (' \ - 'SELECT user_id, machine_id FROM session_history ' \ - 'UNION SELECT user_id, machine_id from sessions_continued) ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY machine_id' + query = "SELECT * FROM (" \ + "SELECT user_id, machine_id FROM session_history " \ + "UNION SELECT user_id, machine_id from sessions_continued) " \ + "WHERE user_id = ? " \ + "GROUP BY machine_id" try: result = monitor_db.select(query=query, args=[user_id]) @@ -2390,7 +2390,7 @@ class DataFactory(object): if rating_key: try: - query = 'SELECT * FROM recently_added WHERE rating_key = ?' + query = "SELECT * FROM recently_added WHERE rating_key = ?" result = monitor_db.select(query=query, args=[rating_key]) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_recently_added_item: %s." 
% e) diff --git a/plexpy/exporter.py b/plexpy/exporter.py index 908aced7..3a48c6d7 100644 --- a/plexpy/exporter.py +++ b/plexpy/exporter.py @@ -2291,9 +2291,9 @@ class ExportObject(Export): def get_export(export_id): db = database.MonitorDatabase() - result = db.select_single('SELECT timestamp, title, file_format, thumb_level, art_level, ' - 'individual_files, complete ' - 'FROM exports WHERE id = ?', + result = db.select_single("SELECT timestamp, title, file_format, thumb_level, art_level, " + "individual_files, complete " + "FROM exports WHERE id = ?", [export_id]) if result: @@ -2324,7 +2324,7 @@ def delete_export(export_id): if deleted: logger.info("Tautulli Exporter :: Deleting export_id %s from the database.", export_id) db = database.MonitorDatabase() - result = db.action('DELETE FROM exports WHERE id = ?', args=[export_id]) + result = db.action("DELETE FROM exports WHERE id = ?", args=[export_id]) return deleted else: @@ -2349,7 +2349,7 @@ def delete_all_exports(): def cancel_exports(): db = database.MonitorDatabase() - db.action('UPDATE exports SET complete = -1 WHERE complete = 0') + db.action("UPDATE exports SET complete = -1 WHERE complete = 0") def get_export_datatable(section_id=None, user_id=None, rating_key=None, kwargs=None): @@ -2368,27 +2368,27 @@ def get_export_datatable(section_id=None, user_id=None, rating_key=None, kwargs= if rating_key: custom_where.append(['exports.rating_key', rating_key]) - columns = ['exports.id AS export_id', - 'exports.timestamp', - 'exports.section_id', - 'exports.user_id', - 'exports.rating_key', - 'exports.media_type', - 'CASE WHEN exports.media_type = "photoalbum" THEN "Photo Album" ELSE ' - 'UPPER(SUBSTR(exports.media_type, 1, 1)) || SUBSTR(exports.media_type, 2) END ' - 'AS media_type_title', - 'exports.title', - 'exports.file_format', - 'exports.metadata_level', - 'exports.media_info_level', - 'exports.thumb_level', - 'exports.art_level', - 'exports.custom_fields', - 'exports.individual_files', - 'exports.file_size', - 'exports.complete', - 'exports.total_items', - 'exports.exported_items' + columns = ["exports.id AS export_id", + "exports.timestamp", + "exports.section_id", + "exports.user_id", + "exports.rating_key", + "exports.media_type", + "CASE WHEN exports.media_type = 'photoalbum' THEN 'Photo Album' ELSE " + "UPPER(SUBSTR(exports.media_type, 1, 1)) || SUBSTR(exports.media_type, 2) END " + "AS media_type_title", + "exports.title", + "exports.file_format", + "exports.metadata_level", + "exports.media_info_level", + "exports.thumb_level", + "exports.art_level", + "exports.custom_fields", + "exports.individual_files", + "exports.file_size", + "exports.complete", + "exports.total_items", + "exports.exported_items" ] try: query = data_tables.ssp_query(table_name='exports', diff --git a/plexpy/graphs.py b/plexpy/graphs.py index e9afa704..49dfee57 100644 --- a/plexpy/graphs.py +++ b/plexpy/graphs.py @@ -64,42 +64,42 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.date_played, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count ' \ - 'FROM (SELECT *,' \ - ' date(started, "unixepoch", "localtime") AS date_played ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY date_played, %s) AS sh ' \ - 'JOIN 
session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.date_played ' \ - 'ORDER BY sh.started' % (timestamp, user_cond, group_by) + query = "SELECT sh.date_played, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count " \ + "FROM (SELECT *," \ + " date(started, 'unixepoch', 'localtime') AS date_played " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY date_played, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.date_played " \ + "ORDER BY sh.started" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.date_played, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count ' \ - 'FROM (SELECT *,' \ - ' date(started, "unixepoch", "localtime") AS date_played,' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY date_played, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.date_played ' \ - 'ORDER BY sh.started' % (timestamp, user_cond, group_by) + query = "SELECT sh.date_played, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count " \ + "FROM (SELECT *," \ + " date(started, 'unixepoch', 'localtime') AS date_played," \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY date_played, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.date_played " \ + "ORDER BY sh.started" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -173,9 +173,9 @@ class Graphs(object): user_cond = '' if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() + user_cond = "AND session_history.user_id = %s " % session.get_session_user_id() elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = "AND session_history.user_id = %s " % user_id if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -184,58 +184,58 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.daynumber, ' \ - '(CASE sh.daynumber ' \ - ' WHEN 0 THEN 
"Sunday" ' \ - ' WHEN 1 THEN "Monday" ' \ - ' WHEN 2 THEN "Tuesday" ' \ - ' WHEN 3 THEN "Wednesday" ' \ - ' WHEN 4 THEN "Thursday" ' \ - ' WHEN 5 THEN "Friday" ' \ - ' ELSE "Saturday" END) AS dayofweek, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' CAST(strftime("%%w", date(started, "unixepoch", "localtime")) AS INTEGER) AS daynumber' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY daynumber, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY dayofweek ' \ - 'ORDER BY sh.daynumber' % (timestamp, user_cond, group_by) + query = "SELECT sh.daynumber, " \ + "(CASE sh.daynumber " \ + " WHEN 0 THEN 'Sunday' " \ + " WHEN 1 THEN 'Monday' " \ + " WHEN 2 THEN 'Tuesday' " \ + " WHEN 3 THEN 'Wednesday' " \ + " WHEN 4 THEN 'Thursday' " \ + " WHEN 5 THEN 'Friday' " \ + " ELSE 'Saturday' END) AS dayofweek, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count " \ + "FROM (SELECT *, " \ + " CAST(strftime('%%w', date(started, 'unixepoch', 'localtime')) AS INTEGER) AS daynumber" \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY daynumber, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY dayofweek " \ + "ORDER BY sh.daynumber" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.daynumber, ' \ - '(CASE sh.daynumber ' \ - ' WHEN 0 THEN "Sunday" ' \ - ' WHEN 1 THEN "Monday" ' \ - ' WHEN 2 THEN "Tuesday" ' \ - ' WHEN 3 THEN "Wednesday" ' \ - ' WHEN 4 THEN "Thursday" ' \ - ' WHEN 5 THEN "Friday" ' \ - ' ELSE "Saturday" END) AS dayofweek, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' CAST(strftime("%%w", date(started, "unixepoch", "localtime")) AS INTEGER) AS daynumber, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY daynumber, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY dayofweek ' \ - 'ORDER BY sh.daynumber' % (timestamp, user_cond, group_by) + query = "SELECT sh.daynumber, " \ + "(CASE sh.daynumber " \ + " WHEN 0 THEN 'Sunday' " \ + " WHEN 1 THEN 'Monday' " \ + " WHEN 2 THEN 'Tuesday' " \ + " WHEN 3 THEN 'Wednesday' " \ + " WHEN 4 THEN 'Thursday' " \ + " WHEN 5 THEN 'Friday' " \ + " ELSE 'Saturday' END) AS dayofweek, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS 
tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count " \ + "FROM (SELECT *, " \ + " CAST(strftime('%%w', date(started, 'unixepoch', 'localtime')) AS INTEGER) AS daynumber, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY daynumber, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY dayofweek " \ + "ORDER BY sh.daynumber" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -321,42 +321,42 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.hourofday, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' strftime("%%H", datetime(started, "unixepoch", "localtime")) AS hourofday' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY hourofday, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.hourofday ' \ - 'ORDER BY sh.hourofday' % (timestamp, user_cond, group_by) + query = "SELECT sh.hourofday, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count " \ + "FROM (SELECT *, " \ + " strftime('%%H', datetime(started, 'unixepoch', 'localtime')) AS hourofday" \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY hourofday, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.hourofday " \ + "ORDER BY sh.hourofday" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.hourofday, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' strftime("%%H", datetime(started, "unixepoch", "localtime")) AS hourofday, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY hourofday, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.hourofday ' \ - 'ORDER BY sh.hourofday' % (timestamp, user_cond, group_by) + query = "SELECT sh.hourofday, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " 
THEN sh.d ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count " \ + "FROM (SELECT *, " \ + " strftime('%%H', datetime(started, 'unixepoch', 'localtime')) AS hourofday, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY hourofday, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.hourofday " \ + "ORDER BY sh.hourofday" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -440,42 +440,42 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.datestring, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' strftime("%%Y-%%m", datetime(started, "unixepoch", "localtime")) AS datestring' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY datestring, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.datestring ' \ - 'ORDER BY sh.datestring' % (timestamp, user_cond, group_by) + query = "SELECT sh.datestring, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count " \ + "FROM (SELECT *, " \ + " strftime('%%Y-%%m', datetime(started, 'unixepoch', 'localtime')) AS datestring" \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY datestring, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.datestring " \ + "ORDER BY sh.datestring" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.datestring, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' strftime("%%Y-%%m", datetime(started, "unixepoch", "localtime")) AS datestring, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY datestring, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.datestring ' \ - 'ORDER BY sh.datestring' % (timestamp, user_cond, group_by) + query = "SELECT sh.datestring, " \ + "SUM(CASE WHEN 
sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count " \ + "FROM (SELECT *, " \ + " strftime('%%Y-%%m', datetime(started, 'unixepoch', 'localtime')) AS datestring, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY datestring, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.datestring " \ + "ORDER BY sh.datestring" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -567,44 +567,44 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.platform, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY total_count DESC, sh.platform ASC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT sh.platform, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.platform " \ + "ORDER BY total_count DESC, sh.platform ASC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.platform, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT sh.platform, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d 
ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.platform " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -666,50 +666,50 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT u.user_id, u.username, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = "" ' \ - ' THEN u.username ELSE u.friendly_name END) AS friendly_name,' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'JOIN users AS u ON u.user_id = sh.user_id ' \ - 'GROUP BY sh.user_id ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT u.user_id, u.username, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = '' " \ + " THEN u.username ELSE u.friendly_name END) AS friendly_name," \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "JOIN users AS u ON u.user_id = sh.user_id " \ + "GROUP BY sh.user_id " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT u.user_id, u.username, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = "" ' \ - ' THEN u.username ELSE u.friendly_name END) AS friendly_name,' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ 
- ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'JOIN users AS u ON u.user_id = sh.user_id ' \ - 'GROUP BY sh.user_id ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT u.user_id, u.username, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = '' " \ + " THEN u.username ELSE u.friendly_name END) AS friendly_name," \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "JOIN users AS u ON u.user_id = sh.user_id " \ + "GROUP BY sh.user_id " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -776,36 +776,36 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.date_played, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count ' \ - 'FROM (SELECT *, ' \ - ' date(started, "unixepoch", "localtime") AS date_played ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY date_played, %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY sh.date_played ' \ - 'ORDER BY sh.started' % (timestamp, user_cond, group_by) + query = "SELECT sh.date_played, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count " \ + "FROM (SELECT *, " \ + " date(started, 'unixepoch', 'localtime') AS date_played " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY date_played, %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY sh.date_played " \ + "ORDER BY sh.started" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.date_played, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count ' \ - 'FROM (SELECT *, ' \ - ' date(started, "unixepoch", "localtime") AS date_played,' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ 
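One subtlety in the strftime()-based buckets above: these query templates are filled in afterwards with the % operator (% (timestamp, user_cond, group_by)), so any percent sign meant for SQLite itself has to be doubled ('%%H', '%%Y-%%m', '%%w') to survive the formatting pass. A small illustration of that escaping rule (the template value here is invented):

    # %% collapses to a single % during %-formatting, leaving a valid
    # strftime() format string for SQLite to evaluate.
    template = "SELECT strftime('%%H', datetime(%s, 'unixepoch', 'localtime'))"
    print(template % 1_700_003_600)
    # -> SELECT strftime('%H', datetime(1700003600, 'unixepoch', 'localtime'))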
- ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY date_played, %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY sh.date_played ' \ - 'ORDER BY sh.started' % (timestamp, user_cond, group_by) + query = "SELECT sh.date_played, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count " \ + "FROM (SELECT *, " \ + " date(started, 'unixepoch', 'localtime') AS date_played," \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY date_played, %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY sh.date_played " \ + "ORDER BY sh.started" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -873,40 +873,40 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT shmi.video_full_resolution AS resolution, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type IN ("movie", "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT shmi.video_full_resolution AS resolution, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type IN ('movie', 'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY resolution " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT shmi.video_full_resolution AS resolution,' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type IN ("movie", "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_duration 
DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT shmi.video_full_resolution AS resolution," \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type IN ('movie', 'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY resolution " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -954,66 +954,66 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT ' \ - '(CASE WHEN shmi.stream_video_full_resolution IS NULL THEN ' \ - ' (CASE WHEN shmi.video_decision = "transcode" THEN ' \ - ' (CASE ' \ - ' WHEN shmi.transcode_height <= 360 THEN "SD" ' \ - ' WHEN shmi.transcode_height <= 480 THEN "480" ' \ - ' WHEN shmi.transcode_height <= 576 THEN "576" ' \ - ' WHEN shmi.transcode_height <= 720 THEN "720" ' \ - ' WHEN shmi.transcode_height <= 1080 THEN "1080" ' \ - ' WHEN shmi.transcode_height <= 1440 THEN "QHD" ' \ - ' WHEN shmi.transcode_height <= 2160 THEN "4k" ' \ - ' ELSE "unknown" END)' \ - ' ELSE shmi.video_full_resolution END) ' \ - ' ELSE shmi.stream_video_full_resolution END) AS resolution, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type IN ("movie", "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT " \ + "(CASE WHEN shmi.stream_video_full_resolution IS NULL THEN " \ + " (CASE WHEN shmi.video_decision = 'transcode' THEN " \ + " (CASE " \ + " WHEN shmi.transcode_height <= 360 THEN 'SD' " \ + " WHEN shmi.transcode_height <= 480 THEN '480' " \ + " WHEN shmi.transcode_height <= 576 THEN '576' " \ + " WHEN shmi.transcode_height <= 720 THEN '720' " \ + " WHEN shmi.transcode_height <= 1080 THEN '1080' " \ + " WHEN shmi.transcode_height <= 1440 THEN 'QHD' " \ + " WHEN shmi.transcode_height <= 2160 THEN '4k' " \ + " ELSE 'unknown' END)" \ + " ELSE shmi.video_full_resolution END) " \ + " ELSE shmi.stream_video_full_resolution END) AS resolution, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type IN ('movie', 
'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY resolution " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT ' \ - '(CASE WHEN shmi.stream_video_full_resolution IS NULL THEN ' \ - ' (CASE WHEN shmi.video_decision = "transcode" THEN ' \ - ' (CASE ' \ - ' WHEN shmi.transcode_height <= 360 THEN "SD" ' \ - ' WHEN shmi.transcode_height <= 480 THEN "480" ' \ - ' WHEN shmi.transcode_height <= 576 THEN "576" ' \ - ' WHEN shmi.transcode_height <= 720 THEN "720" ' \ - ' WHEN shmi.transcode_height <= 1080 THEN "1080" ' \ - ' WHEN shmi.transcode_height <= 1440 THEN "QHD" ' \ - ' WHEN shmi.transcode_height <= 2160 THEN "4k" ' \ - ' ELSE "unknown" END)' \ - ' ELSE shmi.video_full_resolution END) ' \ - ' ELSE shmi.stream_video_full_resolution END) AS resolution, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type IN ("movie", "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT " \ + "(CASE WHEN shmi.stream_video_full_resolution IS NULL THEN " \ + " (CASE WHEN shmi.video_decision = 'transcode' THEN " \ + " (CASE " \ + " WHEN shmi.transcode_height <= 360 THEN 'SD' " \ + " WHEN shmi.transcode_height <= 480 THEN '480' " \ + " WHEN shmi.transcode_height <= 576 THEN '576' " \ + " WHEN shmi.transcode_height <= 720 THEN '720' " \ + " WHEN shmi.transcode_height <= 1080 THEN '1080' " \ + " WHEN shmi.transcode_height <= 1440 THEN 'QHD' " \ + " WHEN shmi.transcode_height <= 2160 THEN '4k' " \ + " ELSE 'unknown' END)" \ + " ELSE shmi.video_full_resolution END) " \ + " ELSE shmi.stream_video_full_resolution END) AS resolution, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type IN ('movie', 'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY resolution " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -1061,38 +1061,38 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.platform, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE 
WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT sh.platform, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY sh.platform " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.platform, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT sh.platform, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY sh.platform " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -1141,44 +1141,44 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT u.user_id, u.username, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = "" ' \ - ' THEN u.username ELSE u.friendly_name END) AS friendly_name,' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 
'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'JOIN users AS u ON u.user_id = sh.user_id ' \ - 'GROUP BY u.user_id ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT u.user_id, u.username, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = '' " \ + " THEN u.username ELSE u.friendly_name END) AS friendly_name," \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "JOIN users AS u ON u.user_id = sh.user_id " \ + "GROUP BY u.user_id " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT u.user_id, u.username, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = "" ' \ - ' THEN u.username ELSE u.friendly_name END) AS friendly_name,' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'JOIN users AS u ON u.user_id = sh.user_id ' \ - 'GROUP BY u.user_id ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT u.user_id, u.username, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = '' " \ + " THEN u.username ELSE u.friendly_name END) AS friendly_name," \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "JOIN users AS u ON u.user_id = sh.user_id " \ + "GROUP BY u.user_id " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: diff --git a/plexpy/libraries.py b/plexpy/libraries.py index ba30efa9..33832aba 100644 --- a/plexpy/libraries.py +++ b/plexpy/libraries.py @@ -95,8 +95,8 @@ def refresh_libraries(): add_live_tv_library(refresh=True) - query = 'UPDATE library_sections SET is_active = 0 WHERE server_id != ? 
OR ' \ - 'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids))) + query = "UPDATE library_sections SET is_active = 0 WHERE server_id != ? OR " \ + "section_id NOT IN ({})".format(", ".join(["?"] * len(section_ids))) monitor_db.action(query=query, args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids) new_keys = plexpy.CONFIG.HOME_LIBRARY_CARDS + new_keys @@ -112,8 +112,8 @@ def refresh_libraries(): def add_live_tv_library(refresh=False): monitor_db = database.MonitorDatabase() - result = monitor_db.select_single('SELECT * FROM library_sections ' - 'WHERE section_id = ? and server_id = ?', + result = monitor_db.select_single("SELECT * FROM library_sections " + "WHERE section_id = ? and server_id = ?", [common.LIVE_TV_SECTION_ID, plexpy.CONFIG.PMS_IDENTIFIER]) if result and not refresh or not result and refresh: @@ -138,7 +138,7 @@ def add_live_tv_library(refresh=False): def has_library_type(section_type): monitor_db = database.MonitorDatabase() - query = 'SELECT * FROM library_sections WHERE section_type = ? AND deleted_section = 0' + query = "SELECT * FROM library_sections WHERE section_type = ? AND deleted_section = 0" args = [section_type] result = monitor_db.select_single(query=query, args=args) return bool(result) @@ -328,44 +328,44 @@ class Libraries(object): group_by = 'session_history.reference_id' if grouping else 'session_history.id' - columns = ['library_sections.id AS row_id', - 'library_sections.server_id', - 'library_sections.section_id', - 'library_sections.section_name', - 'library_sections.section_type', - 'library_sections.count', - 'library_sections.parent_count', - 'library_sections.child_count', - 'library_sections.thumb AS library_thumb', - 'library_sections.custom_thumb_url AS custom_thumb', - 'library_sections.art AS library_art', - 'library_sections.custom_art_url AS custom_art', - 'COUNT(DISTINCT %s) AS plays' % group_by, - 'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \ + columns = ["library_sections.id AS row_id", + "library_sections.server_id", + "library_sections.section_id", + "library_sections.section_name", + "library_sections.section_type", + "library_sections.count", + "library_sections.parent_count", + "library_sections.child_count", + "library_sections.thumb AS library_thumb", + "library_sections.custom_thumb_url AS custom_thumb", + "library_sections.art AS library_art", + "library_sections.custom_art_url AS custom_art", + "COUNT(DISTINCT %s) AS plays" % group_by, + "SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \ ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \ - session_history.paused_counter END) AS duration', - 'MAX(session_history.started) AS last_accessed', - 'MAX(session_history.id) AS history_row_id', - 'session_history_metadata.full_title AS last_played', - 'session_history.rating_key', - 'session_history_metadata.media_type', - 'session_history_metadata.thumb', - 'session_history_metadata.parent_thumb', - 'session_history_metadata.grandparent_thumb', - 'session_history_metadata.parent_title', - 'session_history_metadata.year', - 'session_history_metadata.media_index', - 'session_history_metadata.parent_media_index', - 'session_history_metadata.content_rating', - 'session_history_metadata.labels', - 'session_history_metadata.live', - 'session_history_metadata.added_at', - 'session_history_metadata.originally_available_at', - 'session_history_metadata.guid', - 'library_sections.do_notify', - 
'library_sections.do_notify_created', - 'library_sections.keep_history', - 'library_sections.is_active' + session_history.paused_counter END) AS duration", + "MAX(session_history.started) AS last_accessed", + "MAX(session_history.id) AS history_row_id", + "session_history_metadata.full_title AS last_played", + "session_history.rating_key", + "session_history_metadata.media_type", + "session_history_metadata.thumb", + "session_history_metadata.parent_thumb", + "session_history_metadata.grandparent_thumb", + "session_history_metadata.parent_title", + "session_history_metadata.year", + "session_history_metadata.media_index", + "session_history_metadata.parent_media_index", + "session_history_metadata.content_rating", + "session_history_metadata.labels", + "session_history_metadata.live", + "session_history_metadata.added_at", + "session_history_metadata.originally_available_at", + "session_history_metadata.guid", + "library_sections.do_notify", + "library_sections.do_notify_created", + "library_sections.keep_history", + "library_sections.is_active" ] try: query = data_tables.ssp_query(table_name='library_sections', @@ -499,11 +499,11 @@ class Libraries(object): group_by = 'rating_key' try: - query = 'SELECT MAX(started) AS last_played, COUNT(DISTINCT %s) AS play_count, ' \ - 'rating_key, parent_rating_key, grandparent_rating_key ' \ - 'FROM session_history ' \ - 'WHERE section_id = ? ' \ - 'GROUP BY %s ' % (count_by, group_by) + query = "SELECT MAX(started) AS last_played, COUNT(DISTINCT %s) AS play_count, " \ + "rating_key, parent_rating_key, grandparent_rating_key " \ + "FROM session_history " \ + "WHERE section_id = ? " \ + "GROUP BY %s " % (count_by, group_by) result = monitor_db.select(query, args=[section_id]) except Exception as e: logger.warn("Tautulli Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e) @@ -838,27 +838,27 @@ class Libraries(object): last_accessed = 'NULL' join = '' if include_last_accessed: - last_accessed = 'MAX(session_history.started)' - join = 'LEFT OUTER JOIN session_history ON library_sections.section_id = session_history.section_id ' \ + last_accessed = "MAX(session_history.started)" + join = "LEFT OUTER JOIN session_history ON library_sections.section_id = session_history.section_id " \ monitor_db = database.MonitorDatabase() try: if str(section_id).isdigit(): - where = 'library_sections.section_id = ?' + where = "library_sections.section_id = ?" args = [section_id] else: raise Exception('Missing section_id') - query = 'SELECT library_sections.id AS row_id, server_id, library_sections.section_id, ' \ - 'section_name, section_type, ' \ - 'count, parent_count, child_count, ' \ - 'library_sections.thumb AS library_thumb, custom_thumb_url AS custom_thumb, ' \ - 'library_sections.art AS library_art, ' \ - 'custom_art_url AS custom_art, is_active, ' \ - 'do_notify, do_notify_created, keep_history, deleted_section, %s AS last_accessed ' \ - 'FROM library_sections %s ' \ - 'WHERE %s AND server_id = ? ' % (last_accessed, join, where) + query = "SELECT library_sections.id AS row_id, server_id, library_sections.section_id, " \ + "section_name, section_type, " \ + "count, parent_count, child_count, " \ + "library_sections.thumb AS library_thumb, custom_thumb_url AS custom_thumb, " \ + "library_sections.art AS library_art, " \ + "custom_art_url AS custom_art, is_active, " \ + "do_notify, do_notify_created, keep_history, deleted_section, %s AS last_accessed " \ + "FROM library_sections %s " \ + "WHERE %s AND server_id = ? 
" % (last_accessed, join, where) result = monitor_db.select(query, args=args + [server_id]) except Exception as e: logger.warn("Tautulli Libraries :: Unable to execute database query for get_library_details: %s." % e) @@ -924,24 +924,24 @@ class Libraries(object): try: if days > 0: if str(section_id).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'WHERE stopped >= %s ' \ - 'AND section_id = ?' % (group_by, timestamp_query) + query = "SELECT (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "WHERE stopped >= %s " \ + "AND section_id = ?" % (group_by, timestamp_query) result = monitor_db.select(query, args=[section_id]) else: result = [] else: if str(section_id).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'WHERE section_id = ?' % group_by + query = "SELECT (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "WHERE section_id = ?" % group_by result = monitor_db.select(query, args=[section_id]) else: result = [] @@ -981,17 +981,17 @@ class Libraries(object): try: if str(section_id).isdigit(): - query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \ - 'THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \ - 'users.user_id, users.username, users.thumb, users.custom_avatar_url AS custom_thumb, ' \ - 'COUNT(DISTINCT %s) AS total_plays, (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'JOIN users ON users.user_id = session_history.user_id ' \ - 'WHERE section_id = ? ' \ - 'GROUP BY users.user_id ' \ - 'ORDER BY total_plays DESC, total_time DESC' % group_by + query = "SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' " \ + "THEN users.username ELSE users.friendly_name END) AS friendly_name, " \ + "users.user_id, users.username, users.thumb, users.custom_avatar_url AS custom_thumb, " \ + "COUNT(DISTINCT %s) AS total_plays, (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "JOIN users ON users.user_id = session_history.user_id " \ + "WHERE section_id = ? 
" \ + "GROUP BY users.user_id " \ + "ORDER BY total_plays DESC, total_time DESC" % group_by result = monitor_db.select(query, args=[section_id]) else: result = [] @@ -1030,16 +1030,16 @@ class Libraries(object): try: if str(section_id).isdigit(): - query = 'SELECT session_history.id, session_history.media_type, guid, ' \ - 'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, ' \ - 'title, parent_title, grandparent_title, original_title, ' \ - 'thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ - 'year, originally_available_at, added_at, live, started, user, content_rating, labels, section_id ' \ - 'FROM session_history_metadata ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'WHERE section_id = ? ' \ - 'GROUP BY session_history.rating_key ' \ - 'ORDER BY MAX(started) DESC LIMIT ?' + query = "SELECT session_history.id, session_history.media_type, guid, " \ + "session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, " \ + "title, parent_title, grandparent_title, original_title, " \ + "thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, " \ + "year, originally_available_at, added_at, live, started, user, content_rating, labels, section_id " \ + "FROM session_history_metadata " \ + "JOIN session_history ON session_history_metadata.id = session_history.id " \ + "WHERE section_id = ? " \ + "GROUP BY session_history.rating_key " \ + "ORDER BY MAX(started) DESC LIMIT ?" result = monitor_db.select(query, args=[section_id, limit]) else: result = [] @@ -1085,8 +1085,8 @@ class Libraries(object): monitor_db = database.MonitorDatabase() try: - query = 'SELECT section_id, section_name, section_type, agent ' \ - 'FROM library_sections WHERE deleted_section = 0' + query = "SELECT section_id, section_name, section_type, agent " \ + "FROM library_sections WHERE deleted_section = 0" result = monitor_db.select(query=query) except Exception as e: logger.warn("Tautulli Libraries :: Unable to execute database query for get_sections: %s." % e) @@ -1110,8 +1110,8 @@ class Libraries(object): row_ids = list(map(helpers.cast_to_int, row_ids.split(','))) # Get the section_ids corresponding to the row_ids - result = monitor_db.select('SELECT server_id, section_id FROM library_sections ' - 'WHERE id IN ({})'.format(','.join(['?'] * len(row_ids))), row_ids) + result = monitor_db.select("SELECT server_id, section_id FROM library_sections " + "WHERE id IN ({})".format(",".join(["?"] * len(row_ids))), row_ids) success = [] for library in result: @@ -1135,9 +1135,9 @@ class Libraries(object): logger.info("Tautulli Libraries :: Deleting library with server_id %s and section_id %s from database." % (server_id, section_id)) try: - monitor_db.action('UPDATE library_sections ' - 'SET deleted_section = 1, keep_history = 0, do_notify = 0, do_notify_created = 0 ' - 'WHERE server_id = ? AND section_id = ?', [server_id, section_id]) + monitor_db.action("UPDATE library_sections " + "SET deleted_section = 1, keep_history = 0, do_notify = 0, do_notify_created = 0 " + "WHERE server_id = ? AND section_id = ?", [server_id, section_id]) return delete_success except Exception as e: logger.warn("Tautulli Libraries :: Unable to execute database query for delete: %s." % e) @@ -1150,26 +1150,26 @@ class Libraries(object): try: if section_id and section_id.isdigit(): - query = 'SELECT * FROM library_sections WHERE section_id = ?' 
+ query = "SELECT * FROM library_sections WHERE section_id = ?" result = monitor_db.select(query=query, args=[section_id]) if result: logger.info("Tautulli Libraries :: Re-adding library with id %s to database." % section_id) - monitor_db.action('UPDATE library_sections ' - 'SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 ' - 'WHERE section_id = ?', + monitor_db.action("UPDATE library_sections " + "SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 " + "WHERE section_id = ?", [section_id]) return True else: return False elif section_name: - query = 'SELECT * FROM library_sections WHERE section_name = ?' + query = "SELECT * FROM library_sections WHERE section_name = ?" result = monitor_db.select(query=query, args=[section_name]) if result: logger.info("Tautulli Libraries :: Re-adding library with name %s to database." % section_name) - monitor_db.action('UPDATE library_sections ' - 'SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 ' - 'WHERE section_name = ?', + monitor_db.action("UPDATE library_sections " + "SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 " + "WHERE section_name = ?", [section_name]) return True else: @@ -1203,7 +1203,7 @@ class Libraries(object): try: logger.debug("Tautulli Libraries :: Deleting libraries where server_id does not match %s." % server_id) - monitor_db.action('DELETE FROM library_sections WHERE server_id != ?', [server_id]) + monitor_db.action("DELETE FROM library_sections WHERE server_id != ?", [server_id]) return 'Deleted duplicate libraries from the database.' except Exception as e: diff --git a/plexpy/mobile_app.py b/plexpy/mobile_app.py index 458f84a0..57734975 100644 --- a/plexpy/mobile_app.py +++ b/plexpy/mobile_app.py @@ -67,17 +67,17 @@ def get_mobile_devices(device_id=None, device_token=None): args = [] if device_id or device_token: - where = 'WHERE ' + where = "WHERE " if device_id: - where_id += 'device_id = ?' + where_id += "device_id = ?" args.append(device_id) if device_token: - where_token = 'device_token = ?' + where_token = "device_token = ?" args.append(device_token) - where += ' AND '.join([w for w in [where_id, where_token] if w]) + where += " AND ".join([w for w in [where_id, where_token] if w]) db = database.MonitorDatabase() - result = db.select('SELECT * FROM mobile_devices %s' % where, args=args) + result = db.select("SELECT * FROM mobile_devices %s" % where, args=args) return result @@ -128,7 +128,7 @@ def get_mobile_device_config(mobile_device_id=None): return None db = database.MonitorDatabase() - result = db.select_single('SELECT * FROM mobile_devices WHERE id = ?', + result = db.select_single("SELECT * FROM mobile_devices WHERE id = ?", args=[mobile_device_id]) if result['onesignal_id'] == _ONESIGNAL_DISABLED: @@ -163,11 +163,11 @@ def delete_mobile_device(mobile_device_id=None, device_id=None): if mobile_device_id: logger.debug("Tautulli MobileApp :: Deleting mobile_device_id %s from the database." % mobile_device_id) - result = db.action('DELETE FROM mobile_devices WHERE id = ?', args=[mobile_device_id]) + result = db.action("DELETE FROM mobile_devices WHERE id = ?", args=[mobile_device_id]) return True elif device_id: logger.debug("Tautulli MobileApp :: Deleting device_id %s from the database." 
% device_id) - result = db.action('DELETE FROM mobile_devices WHERE device_id = ?', args=[device_id]) + result = db.action("DELETE FROM mobile_devices WHERE device_id = ?", args=[device_id]) return True else: return False @@ -179,9 +179,9 @@ def set_official(device_id, onesignal_id): platform = 'android' if official > 0 else None try: - result = db.action('UPDATE mobile_devices ' - 'SET official = ?, platform = coalesce(platform, ?) ' - 'WHERE device_id = ?', + result = db.action("UPDATE mobile_devices " + "SET official = ?, platform = coalesce(platform, ?) " + "WHERE device_id = ?", args=[official, platform, device_id]) except Exception as e: logger.warn("Tautulli MobileApp :: Failed to set official flag for device: %s." % e) @@ -193,7 +193,7 @@ def set_last_seen(device_token=None): last_seen = helpers.timestamp() try: - result = db.action('UPDATE mobile_devices SET last_seen = ? WHERE device_token = ?', + result = db.action("UPDATE mobile_devices SET last_seen = ? WHERE device_token = ?", args=[last_seen, device_token]) except Exception as e: logger.warn("Tautulli MobileApp :: Failed to set last_seen time for device: %s." % e) diff --git a/plexpy/newsletter_handler.py b/plexpy/newsletter_handler.py index 8458e144..471a5984 100644 --- a/plexpy/newsletter_handler.py +++ b/plexpy/newsletter_handler.py @@ -181,9 +181,9 @@ def set_notify_success(newsletter_log_id): def get_last_newsletter_email_msg_id(newsletter_id, notify_action): db = database.MonitorDatabase() - result = db.select_single('SELECT email_msg_id FROM newsletter_log ' - 'WHERE newsletter_id = ? AND notify_action = ? AND success = 1 ' - 'ORDER BY timestamp DESC LIMIT 1', [newsletter_id, notify_action]) + result = db.select_single("SELECT email_msg_id FROM newsletter_log " + "WHERE newsletter_id = ? AND notify_action = ? AND success = 1 " + "ORDER BY timestamp DESC LIMIT 1", [newsletter_id, notify_action]) if result: return result['email_msg_id'] @@ -193,13 +193,13 @@ def get_newsletter(newsletter_uuid=None, newsletter_id_name=None): db = database.MonitorDatabase() if newsletter_uuid: - result = db.select_single('SELECT start_date, end_date, uuid, filename FROM newsletter_log ' - 'WHERE uuid = ?', [newsletter_uuid]) + result = db.select_single("SELECT start_date, end_date, uuid, filename FROM newsletter_log " + "WHERE uuid = ?", [newsletter_uuid]) elif newsletter_id_name: - result = db.select_single('SELECT start_date, end_date, uuid, filename FROM newsletter_log ' - 'JOIN newsletters ON newsletters.id = newsletter_log.newsletter_id ' - 'WHERE id_name = ? AND notify_action != "test" ' - 'ORDER BY timestamp DESC LIMIT 1', [newsletter_id_name]) + result = db.select_single("SELECT start_date, end_date, uuid, filename FROM newsletter_log " + "JOIN newsletters ON newsletters.id = newsletter_log.newsletter_id " + "WHERE id_name = ? AND notify_action != 'test' " + "ORDER BY timestamp DESC LIMIT 1", [newsletter_id_name]) else: result = None diff --git a/plexpy/newsletters.py b/plexpy/newsletters.py index 59663fe2..94f73c8f 100644 --- a/plexpy/newsletters.py +++ b/plexpy/newsletters.py @@ -117,15 +117,15 @@ def get_newsletters(newsletter_id=None): args = [] if newsletter_id: - where = 'WHERE ' + where = "WHERE " if newsletter_id: - where_id += 'id = ?' + where_id += "id = ?" 
args.append(newsletter_id) - where += ' AND '.join([w for w in [where_id] if w]) + where += " AND ".join([w for w in [where_id] if w]) db = database.MonitorDatabase() - result = db.select('SELECT id, agent_id, agent_name, agent_label, ' - 'friendly_name, cron, active FROM newsletters %s' % where, args=args) + result = db.select("SELECT id, agent_id, agent_name, agent_label, " + "friendly_name, cron, active FROM newsletters %s" % where, args=args) return result @@ -136,7 +136,7 @@ def delete_newsletter(newsletter_id=None): if str(newsletter_id).isdigit(): logger.debug("Tautulli Newsletters :: Deleting newsletter_id %s from the database." % newsletter_id) - result = db.action('DELETE FROM newsletters WHERE id = ?', args=[newsletter_id]) + result = db.action("DELETE FROM newsletters WHERE id = ?", args=[newsletter_id]) return True else: return False @@ -151,7 +151,7 @@ def get_newsletter_config(newsletter_id=None, mask_passwords=False): return None db = database.MonitorDatabase() - result = db.select_single('SELECT * FROM newsletters WHERE id = ?', args=[newsletter_id]) + result = db.select_single("SELECT * FROM newsletters WHERE id = ?", args=[newsletter_id]) if not result: return None @@ -309,7 +309,7 @@ def send_newsletter(newsletter_id=None, subject=None, body=None, message=None, n def blacklist_logger(): db = database.MonitorDatabase() - notifiers = db.select('SELECT newsletter_config, email_config FROM newsletters') + notifiers = db.select("SELECT newsletter_config, email_config FROM newsletters") for n in notifiers: config = json.loads(n['newsletter_config'] or '{}') @@ -346,7 +346,7 @@ def generate_newsletter_uuid(): while not uuid or uuid_exists: uuid = plexpy.generate_uuid()[:8] result = db.select_single( - 'SELECT EXISTS(SELECT uuid FROM newsletter_log WHERE uuid = ?) as uuid_exists', [uuid]) + "SELECT EXISTS(SELECT uuid FROM newsletter_log WHERE uuid = ?) as uuid_exists", [uuid]) uuid_exists = result['uuid_exists'] return uuid diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 2171d2bd..7dd81627 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -443,12 +443,12 @@ def notify(notifier_id=None, notify_action=None, stream_data=None, timeline_data def get_notify_state(session): monitor_db = database.MonitorDatabase() - result = monitor_db.select('SELECT timestamp, notify_action, notifier_id ' - 'FROM notify_log ' - 'WHERE session_key = ? ' - 'AND rating_key = ? ' - 'AND user_id = ? ' - 'ORDER BY id DESC', + result = monitor_db.select("SELECT timestamp, notify_action, notifier_id " + "FROM notify_log " + "WHERE session_key = ? " + "AND rating_key = ? " + "AND user_id = ? " + "ORDER BY id DESC", args=[session['session_key'], session['rating_key'], session['user_id']]) notify_states = [] for item in result: @@ -467,16 +467,16 @@ def get_notify_state_enabled(session, notify_action, notified=True): timestamp_where = 'AND timestamp IS NULL' monitor_db = database.MonitorDatabase() - result = monitor_db.select('SELECT id AS notifier_id, timestamp ' - 'FROM notifiers ' - 'LEFT OUTER JOIN (' - 'SELECT timestamp, notifier_id ' - 'FROM notify_log ' - 'WHERE session_key = ? ' - 'AND rating_key = ? ' - 'AND user_id = ? ' - 'AND notify_action = ?) AS t ON notifiers.id = t.notifier_id ' - 'WHERE %s = 1 %s' % (notify_action, timestamp_where), + result = monitor_db.select("SELECT id AS notifier_id, timestamp " + "FROM notifiers " + "LEFT OUTER JOIN (" + "SELECT timestamp, notifier_id " + "FROM notify_log " + "WHERE session_key = ? 
" + "AND rating_key = ? " + "AND user_id = ? " + "AND notify_action = ?) AS t ON notifiers.id = t.notifier_id " + "WHERE %s = 1 %s" % (notify_action, timestamp_where), args=[session['session_key'], session['rating_key'], session['user_id'], notify_action]) return result @@ -528,8 +528,8 @@ def set_notify_success(notification_id): def check_nofity_tag(notify_action, tag): monitor_db = database.MonitorDatabase() - result = monitor_db.select_single('SELECT * FROM notify_log ' - 'WHERE notify_action = ? AND tag = ?', + result = monitor_db.select_single("SELECT * FROM notify_log " + "WHERE notify_action = ? AND tag = ?", [notify_action, tag]) return bool(result) @@ -1631,7 +1631,7 @@ def set_hash_image_info(img=None, rating_key=None, width=750, height=1000, def get_hash_image_info(img_hash=None): db = database.MonitorDatabase() - query = 'SELECT * FROM image_hash_lookup WHERE img_hash = ?' + query = "SELECT * FROM image_hash_lookup WHERE img_hash = ?" result = db.select_single(query, args=[img_hash]) return result @@ -1640,8 +1640,8 @@ def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, title=No db = database.MonitorDatabase() try: - query = 'SELECT imdb_id, tvmaze_id, tvmaze_url FROM tvmaze_lookup ' \ - 'WHERE rating_key = ?' + query = "SELECT imdb_id, tvmaze_id, tvmaze_url FROM tvmaze_lookup " \ + "WHERE rating_key = ?" tvmaze_info = db.select_single(query, args=[rating_key]) except Exception as e: logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_tvmaze_by_tvdb_id: %s." % e) @@ -1700,8 +1700,8 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, titl db = database.MonitorDatabase() try: - query = 'SELECT thetvdb_id, imdb_id, themoviedb_id, themoviedb_url FROM themoviedb_lookup ' \ - 'WHERE rating_key = ?' + query = "SELECT thetvdb_id, imdb_id, themoviedb_id, themoviedb_url FROM themoviedb_lookup " \ + "WHERE rating_key = ?" themoviedb_info = db.select_single(query, args=[rating_key]) except Exception as e: logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_themoviedb_by_imdb_id: %s." % e) @@ -1778,8 +1778,8 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None): db = database.MonitorDatabase() try: - query = 'SELECT themoviedb_json FROM themoviedb_lookup ' \ - 'WHERE rating_key = ?' + query = "SELECT themoviedb_json FROM themoviedb_lookup " \ + "WHERE rating_key = ?" result = db.select_single(query, args=[rating_key]) except Exception as e: logger.warn("Tautulli NotificationHandler :: Unable to execute database query for get_themoviedb_info: %s." % e) @@ -1829,8 +1829,8 @@ def lookup_musicbrainz_info(musicbrainz_type=None, rating_key=None, artist=None, db = database.MonitorDatabase() try: - query = 'SELECT musicbrainz_id, musicbrainz_url, musicbrainz_type FROM musicbrainz_lookup ' \ - 'WHERE rating_key = ?' + query = "SELECT musicbrainz_id, musicbrainz_url, musicbrainz_type FROM musicbrainz_lookup " \ + "WHERE rating_key = ?" musicbrainz_info = db.select_single(query, args=[rating_key]) except Exception as e: logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_musicbrainz: %s." 
% e) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index c7580603..a2fa6341 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -507,7 +507,7 @@ def get_notifiers(notifier_id=None, notify_action=None): where += ' AND '.join([w for w in [where_id, where_action] if w]) db = database.MonitorDatabase() - result = db.select('SELECT id, agent_id, agent_name, agent_label, friendly_name, %s FROM notifiers %s' + result = db.select("SELECT id, agent_id, agent_name, agent_label, friendly_name, %s FROM notifiers %s" % (', '.join(notify_actions), where), args=args) for item in result: @@ -522,7 +522,7 @@ def delete_notifier(notifier_id=None): if str(notifier_id).isdigit(): logger.debug("Tautulli Notifiers :: Deleting notifier_id %s from the database." % notifier_id) - result = db.action('DELETE FROM notifiers WHERE id = ?', args=[notifier_id]) + result = db.action("DELETE FROM notifiers WHERE id = ?", args=[notifier_id]) return True else: return False @@ -537,7 +537,7 @@ def get_notifier_config(notifier_id=None, mask_passwords=False): return None db = database.MonitorDatabase() - result = db.select_single('SELECT * FROM notifiers WHERE id = ?', args=[notifier_id]) + result = db.select_single("SELECT * FROM notifiers WHERE id = ?", args=[notifier_id]) if not result: return None @@ -3866,8 +3866,8 @@ class TAUTULLIREMOTEAPP(Notifier): db = database.MonitorDatabase() try: - query = 'SELECT * FROM mobile_devices WHERE official = 1 ' \ - 'AND onesignal_id IS NOT NULL AND onesignal_id != ""' + query = "SELECT * FROM mobile_devices WHERE official = 1 " \ + "AND onesignal_id IS NOT NULL AND onesignal_id != ''" return db.select(query=query) except Exception as e: logger.warn("Tautulli Notifiers :: Unable to retrieve Tautulli Remote app devices list: %s." % e) @@ -4472,8 +4472,8 @@ def check_browser_enabled(): def get_browser_notifications(): db = database.MonitorDatabase() - result = db.select('SELECT notifier_id, subject_text, body_text FROM notify_log ' - 'WHERE agent_id = 17 AND timestamp >= ? ', + result = db.select("SELECT notifier_id, subject_text, body_text FROM notify_log " + "WHERE agent_id = 17 AND timestamp >= ? ", args=[time.time() - 5]) notifications = [] diff --git a/plexpy/plexivity_import.py b/plexpy/plexivity_import.py index 3e5350bc..644782a2 100644 --- a/plexpy/plexivity_import.py +++ b/plexpy/plexivity_import.py @@ -304,27 +304,27 @@ def import_from_plexivity(database_file=None, table_name=None, import_ignore_int logger.debug("Tautulli Importer :: Unable to refresh the users list. 
Aborting import.") return None - query = 'SELECT id AS id, ' \ - 'time AS started, ' \ - 'stopped, ' \ - 'null AS user_id, ' \ - 'user, ' \ - 'ip_address, ' \ - 'paused_counter, ' \ - 'platform AS player, ' \ - 'null AS platform, ' \ - 'null as machine_id, ' \ - 'null AS media_type, ' \ - 'null AS view_offset, ' \ - 'xml, ' \ - 'rating as content_rating,' \ - 'summary,' \ - 'title AS full_title,' \ - '(case when orig_title_ep = "n/a" then orig_title else ' \ - 'orig_title_ep end) as title,' \ - '(case when orig_title_ep != "n/a" then orig_title else ' \ - 'null end) as grandparent_title ' \ - 'FROM ' + table_name + ' ORDER BY id' + query = "SELECT id AS id, " \ + "time AS started, " \ + "stopped, " \ + "null AS user_id, " \ + "user, " \ + "ip_address, " \ + "paused_counter, " \ + "platform AS player, " \ + "null AS platform, " \ + "null as machine_id, " \ + "null AS media_type, " \ + "null AS view_offset, " \ + "xml, " \ + "rating as content_rating," \ + "summary," \ + "title AS full_title," \ + "(case when orig_title_ep = 'n/a' then orig_title else " \ + "orig_title_ep end) as title," \ + "(case when orig_title_ep != 'n/a' then orig_title else " \ + "null end) as grandparent_title " \ + "FROM " + table_name + " ORDER BY id" result = connection.execute(query) @@ -456,9 +456,9 @@ def import_users(): logger.debug("Tautulli Importer :: Importing Plexivity Users...") monitor_db = database.MonitorDatabase() - query = 'INSERT OR IGNORE INTO users (user_id, username) ' \ - 'SELECT user_id, user ' \ - 'FROM session_history WHERE user_id != 1 GROUP BY user_id' + query = "INSERT OR IGNORE INTO users (user_id, username) " \ + "SELECT user_id, user " \ + "FROM session_history WHERE user_id != 1 GROUP BY user_id" try: monitor_db.action(query) diff --git a/plexpy/plexwatch_import.py b/plexpy/plexwatch_import.py index ac0fd7b0..4d8ec80b 100644 --- a/plexpy/plexwatch_import.py +++ b/plexpy/plexwatch_import.py @@ -295,29 +295,29 @@ def import_from_plexwatch(database_file=None, table_name=None, import_ignore_int logger.debug("Tautulli Importer :: Unable to refresh the users list. 
Aborting import.") return None - query = 'SELECT time AS started, ' \ - 'stopped, ' \ - 'cast(ratingKey as text) AS rating_key, ' \ - 'null AS user_id, ' \ - 'user, ' \ - 'ip_address, ' \ - 'paused_counter, ' \ - 'platform AS player, ' \ - 'null AS platform, ' \ - 'null as machine_id, ' \ - 'parentRatingKey as parent_rating_key, ' \ - 'grandparentRatingKey as grandparent_rating_key, ' \ - 'null AS media_type, ' \ - 'null AS view_offset, ' \ - 'xml, ' \ - 'rating as content_rating,' \ - 'summary,' \ - 'title AS full_title,' \ - '(case when orig_title_ep = "" then orig_title else ' \ - 'orig_title_ep end) as title,' \ - '(case when orig_title_ep != "" then orig_title else ' \ - 'null end) as grandparent_title ' \ - 'FROM ' + table_name + ' ORDER BY id' + query = "SELECT time AS started, " \ + "stopped, " \ + "cast(ratingKey as text) AS rating_key, " \ + "null AS user_id, " \ + "user, " \ + "ip_address, " \ + "paused_counter, " \ + "platform AS player, " \ + "null AS platform, " \ + "null as machine_id, " \ + "parentRatingKey as parent_rating_key, " \ + "grandparentRatingKey as grandparent_rating_key, " \ + "null AS media_type, " \ + "null AS view_offset, " \ + "xml, " \ + "rating as content_rating," \ + "summary," \ + "title AS full_title," \ + "(case when orig_title_ep = '' then orig_title else " \ + "orig_title_ep end) as title," \ + "(case when orig_title_ep != '' then orig_title else " \ + "null end) as grandparent_title " \ + "FROM " + table_name + " ORDER BY id" result = connection.execute(query) @@ -450,9 +450,9 @@ def import_users(): logger.debug("Tautulli Importer :: Importing PlexWatch Users...") monitor_db = database.MonitorDatabase() - query = 'INSERT OR IGNORE INTO users (user_id, username) ' \ - 'SELECT user_id, user ' \ - 'FROM session_history WHERE user_id != 1 GROUP BY user_id' + query = "INSERT OR IGNORE INTO users (user_id, username) " \ + "SELECT user_id, user " \ + "FROM session_history WHERE user_id != 1 GROUP BY user_id" try: monitor_db.action(query) diff --git a/plexpy/users.py b/plexpy/users.py index 5ec093ff..0e201791 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -75,8 +75,8 @@ def refresh_users(): # Check if we've set a custom avatar if so don't overwrite it. 
if keys_dict['user_id']: - avatar_urls = monitor_db.select('SELECT thumb, custom_avatar_url ' - 'FROM users WHERE user_id = ?', + avatar_urls = monitor_db.select("SELECT thumb, custom_avatar_url " + "FROM users WHERE user_id = ?", [keys_dict['user_id']]) if avatar_urls: if not avatar_urls[0]['custom_avatar_url'] or \ @@ -98,7 +98,7 @@ def refresh_users(): if result == 'insert': new_users.append(item['username']) - query = 'UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})'.format(', '.join(['?'] * len(user_ids))) + query = "UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})".format(", ".join(["?"] * len(user_ids))) monitor_db.action(query=query, args=user_ids) # Add new users to loger username filter @@ -137,43 +137,43 @@ class Users(object): group_by = 'session_history.reference_id' if grouping else 'session_history.id' - columns = ['users.id AS row_id', - 'users.user_id', - 'users.username', - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name', - 'users.title', - 'users.email', - 'users.thumb AS user_thumb', - 'users.custom_avatar_url AS custom_thumb', - 'COUNT(DISTINCT %s) AS plays' % group_by, - 'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \ + columns = ["users.id AS row_id", + "users.user_id", + "users.username", + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name", + "users.title", + "users.email", + "users.thumb AS user_thumb", + "users.custom_avatar_url AS custom_thumb", + "COUNT(DISTINCT %s) AS plays" % group_by, + "SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \ ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \ - session_history.paused_counter END) AS duration', - 'MAX(session_history.started) AS last_seen', - 'MAX(session_history.id) AS history_row_id', - 'session_history_metadata.full_title AS last_played', - 'session_history.ip_address', - 'session_history.platform', - 'session_history.player', - 'session_history.rating_key', - 'session_history_metadata.media_type', - 'session_history_metadata.thumb', - 'session_history_metadata.parent_thumb', - 'session_history_metadata.grandparent_thumb', - 'session_history_metadata.parent_title', - 'session_history_metadata.year', - 'session_history_metadata.media_index', - 'session_history_metadata.parent_media_index', - 'session_history_metadata.live', - 'session_history_metadata.added_at', - 'session_history_metadata.originally_available_at', - 'session_history_metadata.guid', - 'session_history_media_info.transcode_decision', - 'users.do_notify AS do_notify', - 'users.keep_history AS keep_history', - 'users.allow_guest AS allow_guest', - 'users.is_active AS is_active' + session_history.paused_counter END) AS duration", + "MAX(session_history.started) AS last_seen", + "MAX(session_history.id) AS history_row_id", + "session_history_metadata.full_title AS last_played", + "session_history.ip_address", + "session_history.platform", + "session_history.player", + "session_history.rating_key", + "session_history_metadata.media_type", + "session_history_metadata.thumb", + "session_history_metadata.parent_thumb", + "session_history_metadata.grandparent_thumb", + "session_history_metadata.parent_title", + "session_history_metadata.year", + "session_history_metadata.media_index", + 
"session_history_metadata.parent_media_index", + "session_history_metadata.live", + "session_history_metadata.added_at", + "session_history_metadata.originally_available_at", + "session_history_metadata.guid", + "session_history_media_info.transcode_decision", + "users.do_notify AS do_notify", + "users.keep_history AS keep_history", + "users.allow_guest AS allow_guest", + "users.is_active AS is_active" ] try: query = data_tables.ssp_query(table_name='users', @@ -270,32 +270,32 @@ class Users(object): custom_where = ['users.user_id', user_id] - columns = ['session_history.id AS history_row_id', - 'MIN(session_history.started) AS first_seen', - 'MAX(session_history.started) AS last_seen', - 'session_history.ip_address', - 'COUNT(session_history.id) AS play_count', - 'session_history.platform', - 'session_history.player', - 'session_history.rating_key', - 'session_history_metadata.full_title AS last_played', - 'session_history_metadata.thumb', - 'session_history_metadata.parent_thumb', - 'session_history_metadata.grandparent_thumb', - 'session_history_metadata.media_type', - 'session_history_metadata.parent_title', - 'session_history_metadata.year', - 'session_history_metadata.media_index', - 'session_history_metadata.parent_media_index', - 'session_history_metadata.live', - 'session_history_metadata.added_at', - 'session_history_metadata.originally_available_at', - 'session_history_metadata.guid', - 'session_history_media_info.transcode_decision', - 'session_history.user', - 'session_history.user_id as custom_user_id', - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name' + columns = ["session_history.id AS history_row_id", + "MIN(session_history.started) AS first_seen", + "MAX(session_history.started) AS last_seen", + "session_history.ip_address", + "COUNT(session_history.id) AS play_count", + "session_history.platform", + "session_history.player", + "session_history.rating_key", + "session_history_metadata.full_title AS last_played", + "session_history_metadata.thumb", + "session_history_metadata.parent_thumb", + "session_history_metadata.grandparent_thumb", + "session_history_metadata.media_type", + "session_history_metadata.parent_title", + "session_history_metadata.year", + "session_history_metadata.media_index", + "session_history_metadata.parent_media_index", + "session_history_metadata.live", + "session_history_metadata.added_at", + "session_history_metadata.originally_available_at", + "session_history_metadata.guid", + "session_history_media_info.transcode_decision", + "session_history.user", + "session_history.user_id as custom_user_id", + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name" ] try: @@ -436,31 +436,31 @@ class Users(object): last_seen = 'NULL' join = '' if include_last_seen: - last_seen = 'MAX(session_history.started)' - join = 'LEFT OUTER JOIN session_history ON users.user_id = session_history.user_id' + last_seen = "MAX(session_history.started)" + join = "LEFT OUTER JOIN session_history ON users.user_id = session_history.user_id" monitor_db = database.MonitorDatabase() try: if str(user_id).isdigit(): - where = 'users.user_id = ?' + where = "users.user_id = ?" args = [user_id] elif user: - where = 'users.username = ?' + where = "users.username = ?" args = [user] elif email: - where = 'users.email = ?' + where = "users.email = ?" 
args = [email] else: - raise Exception('Missing user_id, username, or email') + raise Exception("Missing user_id, username, or email") - query = 'SELECT users.id AS row_id, users.user_id, username, friendly_name, ' \ - 'thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \ - 'email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \ - 'do_notify, keep_history, deleted_user, ' \ - 'allow_guest, shared_libraries, %s AS last_seen ' \ - 'FROM users %s ' \ - 'WHERE %s COLLATE NOCASE' % (last_seen, join, where) + query = "SELECT users.id AS row_id, users.user_id, username, friendly_name, " \ + "thumb AS user_thumb, custom_avatar_url AS custom_thumb, " \ + "email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, " \ + "do_notify, keep_history, deleted_user, " \ + "allow_guest, shared_libraries, %s AS last_seen " \ + "FROM users %s " \ + "WHERE %s COLLATE NOCASE" % (last_seen, join, where) result = monitor_db.select(query, args=args) except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for get_user_details: %s." % e) @@ -531,22 +531,22 @@ class Users(object): try: if days > 0: if str(user_id).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - ' SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays ' \ - 'FROM session_history ' \ - 'WHERE stopped >= %s ' \ - 'AND user_id = ? ' % (group_by, timestamp_query) + query = "SELECT (SUM(stopped - started) - " \ + " SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays " \ + "FROM session_history " \ + "WHERE stopped >= %s " \ + "AND user_id = ? " % (group_by, timestamp_query) result = monitor_db.select(query, args=[user_id]) else: result = [] else: if str(user_id).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - ' SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays ' \ - 'FROM session_history ' \ - 'WHERE user_id = ? ' % group_by + query = "SELECT (SUM(stopped - started) - " \ + " SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays " \ + "FROM session_history " \ + "WHERE user_id = ? " % group_by result = monitor_db.select(query, args=[user_id]) else: result = [] @@ -587,13 +587,13 @@ class Users(object): try: if str(user_id).isdigit(): - query = 'SELECT player, COUNT(DISTINCT %s) as total_plays, (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'platform ' \ - 'FROM session_history ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY player ' \ - 'ORDER BY total_plays DESC, total_time DESC' % group_by + query = "SELECT player, COUNT(DISTINCT %s) as total_plays, (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "platform " \ + "FROM session_history " \ + "WHERE user_id = ? 
" \ + "GROUP BY player " \ + "ORDER BY total_plays DESC, total_time DESC" % group_by result = monitor_db.select(query, args=[user_id]) else: result = [] @@ -630,17 +630,17 @@ class Users(object): try: if str(user_id).isdigit(): - query = 'SELECT session_history.id, session_history.media_type, guid, ' \ - 'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, ' \ - 'title, parent_title, grandparent_title, original_title, ' \ - 'thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ - 'year, originally_available_at, added_at, live, started, user ' \ - 'FROM session_history_metadata ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \ - ' ELSE session_history.rating_key END) ' \ - 'ORDER BY MAX(started) DESC LIMIT ?' + query = "SELECT session_history.id, session_history.media_type, guid, " \ + "session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, " \ + "title, parent_title, grandparent_title, original_title, " \ + "thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, " \ + "year, originally_available_at, added_at, live, started, user " \ + "FROM session_history_metadata " \ + "JOIN session_history ON session_history_metadata.id = session_history.id " \ + "WHERE user_id = ? " \ + "GROUP BY (CASE WHEN session_history.media_type = 'track' THEN session_history.parent_rating_key " \ + " ELSE session_history.rating_key END) " \ + "ORDER BY MAX(started) DESC LIMIT ?" result = monitor_db.select(query, args=[user_id, limit]) else: result = [] @@ -683,11 +683,11 @@ class Users(object): monitor_db = database.MonitorDatabase() try: - query = 'SELECT id AS row_id, user_id, username, friendly_name, thumb, custom_avatar_url, email, ' \ - 'is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \ - 'do_notify, keep_history, allow_guest, shared_libraries, ' \ - 'filter_all, filter_movies, filter_tv, filter_music, filter_photos ' \ - 'FROM users WHERE deleted_user = 0' + query = "SELECT id AS row_id, user_id, username, friendly_name, thumb, custom_avatar_url, email, " \ + "is_active, is_admin, is_home_user, is_allow_sync, is_restricted, " \ + "do_notify, keep_history, allow_guest, shared_libraries, " \ + "filter_all, filter_movies, filter_tv, filter_music, filter_photos " \ + "FROM users WHERE deleted_user = 0" result = monitor_db.select(query=query) except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for get_users: %s." % e) @@ -729,8 +729,8 @@ class Users(object): row_ids = list(map(helpers.cast_to_int, row_ids.split(','))) # Get the user_ids corresponding to the row_ids - result = monitor_db.select('SELECT user_id FROM users ' - 'WHERE id IN ({})'.format(','.join(['?'] * len(row_ids))), row_ids) + result = monitor_db.select("SELECT user_id FROM users " + "WHERE id IN ({})".format(",".join(["?"] * len(row_ids))), row_ids) success = [] for user in result: @@ -747,9 +747,9 @@ class Users(object): logger.info("Tautulli Users :: Deleting user with user_id %s from database." 
% user_id) try: - monitor_db.action('UPDATE users ' - 'SET deleted_user = 1, keep_history = 0, do_notify = 0 ' - 'WHERE user_id = ?', [user_id]) + monitor_db.action("UPDATE users " + "SET deleted_user = 1, keep_history = 0, do_notify = 0 " + "WHERE user_id = ?", [user_id]) return delete_success except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for delete: %s." % e) @@ -762,25 +762,25 @@ class Users(object): try: if user_id and str(user_id).isdigit(): - query = 'SELECT * FROM users WHERE user_id = ?' + query = "SELECT * FROM users WHERE user_id = ?" result = monitor_db.select(query=query, args=[user_id]) if result: logger.info("Tautulli Users :: Re-adding user with id %s to database." % user_id) - monitor_db.action('UPDATE users ' - 'SET deleted_user = 0, keep_history = 1, do_notify = 1 ' - 'WHERE user_id = ?', [user_id]) + monitor_db.action("UPDATE users " + "SET deleted_user = 0, keep_history = 1, do_notify = 1 " + "WHERE user_id = ?", [user_id]) return True else: return False elif username: - query = 'SELECT * FROM users WHERE username = ?' + query = "SELECT * FROM users WHERE username = ?" result = monitor_db.select(query=query, args=[username]) if result: logger.info("Tautulli Users :: Re-adding user with username %s to database." % username) - monitor_db.action('UPDATE users ' - 'SET deleted_user = 0, keep_history = 1, do_notify = 1 ' - 'WHERE username = ?', [username]) + monitor_db.action("UPDATE users " + "SET deleted_user = 0, keep_history = 1, do_notify = 1 " + "WHERE username = ?", [username]) return True else: return False @@ -793,7 +793,7 @@ class Users(object): if user: try: monitor_db = database.MonitorDatabase() - query = 'SELECT user_id FROM users WHERE username = ?' + query = "SELECT user_id FROM users WHERE username = ?" result = monitor_db.select_single(query, args=[user]) if result: return result['user_id'] @@ -809,14 +809,14 @@ class Users(object): user_cond = '' if session.get_session_user_id(): - user_cond = 'AND user_id = %s ' % session.get_session_user_id() + user_cond = "AND user_id = %s " % session.get_session_user_id() try: - query = 'SELECT user_id, ' \ - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name ' \ - 'FROM users ' \ - 'WHERE deleted_user = 0 %s' % user_cond + query = "SELECT user_id, " \ + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name " \ + "FROM users " \ + "WHERE deleted_user = 0 %s" % user_cond result = monitor_db.select(query) except Exception as e: @@ -835,8 +835,8 @@ class Users(object): if user_id: try: monitor_db = database.MonitorDatabase() - query = 'SELECT allow_guest, user_token, server_token FROM users ' \ - 'WHERE user_id = ? AND deleted_user = 0' + query = "SELECT allow_guest, user_token, server_token FROM users " \ + "WHERE user_id = ? AND deleted_user = 0" result = monitor_db.select_single(query, args=[user_id]) if result: tokens = {'allow_guest': result['allow_guest'], @@ -857,8 +857,8 @@ class Users(object): try: monitor_db = database.MonitorDatabase() - query = 'SELECT filter_all, filter_movies, filter_tv, filter_music, filter_photos FROM users ' \ - 'WHERE user_id = ?' + query = "SELECT filter_all, filter_movies, filter_tv, filter_music, filter_photos FROM users " \ + "WHERE user_id = ?" 
result = monitor_db.select_single(query, args=[user_id]) except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for get_filters: %s." % e) @@ -907,8 +907,8 @@ class Users(object): def get_user_login(self, jwt_token): monitor_db = database.MonitorDatabase() - result = monitor_db.select_single('SELECT * FROM user_login ' - 'WHERE jwt_token = ?', + result = monitor_db.select_single("SELECT * FROM user_login " + "WHERE jwt_token = ?", [jwt_token]) return result @@ -918,8 +918,8 @@ class Users(object): if jwt_token: logger.debug("Tautulli Users :: Clearing user JWT token.") try: - monitor_db.action('UPDATE user_login SET jwt_token = NULL ' - 'WHERE jwt_token = ?', + monitor_db.action("UPDATE user_login SET jwt_token = NULL " + "WHERE jwt_token = ?", [jwt_token]) except Exception as e: logger.error("Tautulli Users :: Unable to clear user JWT token: %s.", e) @@ -929,8 +929,8 @@ class Users(object): row_ids = list(map(helpers.cast_to_int, row_ids.split(','))) logger.debug("Tautulli Users :: Clearing JWT tokens for row_ids %s.", row_ids) try: - monitor_db.action('UPDATE user_login SET jwt_token = NULL ' - 'WHERE id in ({})'.format(','.join(['?'] * len(row_ids))), + monitor_db.action("UPDATE user_login SET jwt_token = NULL " + "WHERE id in ({})".format(",".join(["?"] * len(row_ids))), row_ids) except Exception as e: logger.error("Tautulli Users :: Unable to clear JWT tokens: %s.", e) @@ -954,19 +954,19 @@ class Users(object): else: custom_where = [['user_login.user_id', user_id]] if user_id else [] - columns = ['user_login.id AS row_id', - 'user_login.timestamp', - 'user_login.user_id', - 'user_login.user', - 'user_login.user_group', - 'user_login.ip_address', - 'user_login.host', - 'user_login.user_agent', - 'user_login.success', - 'user_login.expiry', - 'user_login.jwt_token', - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name' + columns = ["user_login.id AS row_id", + "user_login.timestamp", + "user_login.user_id", + "user_login.user", + "user_login.user_group", + "user_login.ip_address", + "user_login.host", + "user_login.user_agent", + "user_login.success", + "user_login.expiry", + "user_login.jwt_token", + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name" ] try: diff --git a/plexpy/webauth.py b/plexpy/webauth.py index 50ce2d2c..d105a8c2 100644 --- a/plexpy/webauth.py +++ b/plexpy/webauth.py @@ -99,7 +99,7 @@ def plex_user_login(token=None, headers=None): try: logger.debug("Tautulli WebAuth :: Registering token for user '%s' in the database." % user_details['username']) - result = monitor_db.action('UPDATE users SET server_token = ? WHERE user_id = ?', + result = monitor_db.action("UPDATE users SET server_token = ? WHERE user_id = ?", [server_token, user_details['user_id']]) if result: @@ -246,12 +246,12 @@ def all_of(*conditions): def check_rate_limit(ip_address): monitor_db = MonitorDatabase() - result = monitor_db.select('SELECT timestamp, success FROM user_login ' - 'WHERE ip_address = ? ' - 'AND timestamp >= ( ' - 'SELECT CASE WHEN MAX(timestamp) IS NULL THEN 0 ELSE MAX(timestamp) END ' - 'FROM user_login WHERE ip_address = ? AND success = 1) ' - 'ORDER BY timestamp DESC', + result = monitor_db.select("SELECT timestamp, success FROM user_login " + "WHERE ip_address = ? 
" + "AND timestamp >= ( " + "SELECT CASE WHEN MAX(timestamp) IS NULL THEN 0 ELSE MAX(timestamp) END " + "FROM user_login WHERE ip_address = ? AND success = 1) " + "ORDER BY timestamp DESC", [ip_address, ip_address]) try: From ea6c6078df410f333a060016dfce18c21ad134c9 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 23 May 2023 10:03:36 -0700 Subject: [PATCH 014/361] v2.12.4 --- CHANGELOG.md | 11 +++++++++++ plexpy/version.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b3c6c4a1..24baf072 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## v2.12.4 (2023-05-23) + +* History: + * Fix: Set view offset equal to duration if a stream is stopped within the last 10 sec. +* Other: + * Fix: Database import may fail for some older databases. + * Fix: Double-quoted strings for newer versions of SQLite. (#2015, #2057) +* API: + * Change: Return the ID for async API calls (export_metadata, notify, notify_newsletter). + + ## v2.12.3 (2023-04-14) * Activity: diff --git a/plexpy/version.py b/plexpy/version.py index 47ba56cb..119e0b07 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.12.3" \ No newline at end of file +PLEXPY_RELEASE_VERSION = "v2.12.4" \ No newline at end of file From 2a48e3375a5ab3d8f9a031ef0949664b4444dc39 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 25 Jun 2023 17:35:16 -0700 Subject: [PATCH 015/361] Add `d3d11va` hardware decoder --- plexpy/common.py | 1 + 1 file changed, 1 insertion(+) diff --git a/plexpy/common.py b/plexpy/common.py index cf1180dc..889d3f73 100644 --- a/plexpy/common.py +++ b/plexpy/common.py @@ -216,6 +216,7 @@ AUDIO_QUALITY_PROFILES = { AUDIO_QUALITY_PROFILES = OrderedDict(sorted(list(AUDIO_QUALITY_PROFILES.items()), key=lambda k: k[0], reverse=True)) HW_DECODERS = [ + 'd3d11va', 'dxva2', 'videotoolbox', 'mediacodecndk', From d9b3b311b947ccf8430fadee3c051a49bda0750c Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 27 Jun 2023 14:23:02 -0700 Subject: [PATCH 016/361] Only initialize mako TemplateLookup once * Ref: sqlalchemy/mako#378 --- plexpy/webserve.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 88b65174..b893b661 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -119,12 +119,16 @@ else: from plexpy import macos -def serve_template(templatename, **kwargs): - interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/') - template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.INTERFACE) +TEMPLATE_LOOKUP = None - _hplookup = TemplateLookup(directories=[template_dir], default_filters=['unicode', 'h'], - error_handler=mako_error_handler) + +def serve_template(template_name, **kwargs): + global TEMPLATE_LOOKUP + if TEMPLATE_LOOKUP is None: + interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/') + template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.INTERFACE) + TEMPLATE_LOOKUP = TemplateLookup(directories=[template_dir], default_filters=['unicode', 'h'], + error_handler=mako_error_handler) http_root = plexpy.HTTP_ROOT server_name = helpers.pms_name() @@ -133,7 +137,7 @@ def serve_template(templatename, **kwargs): _session = get_session_info() try: - template = 
_hplookup.get_template(templatename) + template = TEMPLATE_LOOKUP.get_template(template_name) return template.render(http_root=http_root, server_name=server_name, cache_param=cache_param, _session=_session, **kwargs) except Exception as e: From 7ff3abe8b73921ce5a5a05fd7a5821b1b6575869 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 27 Jun 2023 17:20:21 -0700 Subject: [PATCH 017/361] Rename template_name argument --- plexpy/newsletters.py | 8 +- plexpy/webserve.py | 170 +++++++++++++++++++++--------------------- 2 files changed, 89 insertions(+), 89 deletions(-) diff --git a/plexpy/newsletters.py b/plexpy/newsletters.py index 94f73c8f..661a2b42 100644 --- a/plexpy/newsletters.py +++ b/plexpy/newsletters.py @@ -318,7 +318,7 @@ def blacklist_logger(): logger.blacklist_config(email_config) -def serve_template(templatename, **kwargs): +def serve_template(template_name, **kwargs): if plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR: logger.info("Tautulli Newsletters :: Using custom newsletter template directory.") template_dir = plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR @@ -327,12 +327,12 @@ def serve_template(templatename, **kwargs): template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.NEWSLETTER_TEMPLATES) if not plexpy.CONFIG.NEWSLETTER_INLINE_STYLES: - templatename = templatename.replace('.html', '.internal.html') + template_name = template_name.replace('.html', '.internal.html') _hplookup = TemplateLookup(directories=[template_dir], default_filters=['unicode', 'h']) try: - template = _hplookup.get_template(templatename) + template = _hplookup.get_template(template_name) return template.render(**kwargs), False except: return exceptions.html_error_template().render(), True @@ -477,7 +477,7 @@ class Newsletter(object): logger.info("Tautulli Newsletters :: Generating newsletter%s." 
% (' preview' if self.is_preview else '')) newsletter_rendered, self.template_error = serve_template( - templatename=self._TEMPLATE, + template_name=self._TEMPLATE, uuid=self.uuid, subject=self.subject_formatted, body=self.body_formatted, diff --git a/plexpy/webserve.py b/plexpy/webserve.py index b893b661..72eb9170 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -226,7 +226,7 @@ class WebInterface(object): plexpy.initialize_scheduler() raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "home") else: - return serve_template(templatename="welcome.html", title="Welcome", config=config) + return serve_template(template_name="welcome.html", title="Welcome", config=config) @cherrypy.expose @cherrypy.tools.json_out() @@ -291,7 +291,7 @@ class WebInterface(object): "update_show_changelog": plexpy.CONFIG.UPDATE_SHOW_CHANGELOG, "first_run_complete": plexpy.CONFIG.FIRST_RUN_COMPLETE } - return serve_template(templatename="index.html", title="Home", config=config) + return serve_template(template_name="index.html", title="Home", config=config) @cherrypy.expose @cherrypy.tools.json_out() @@ -336,10 +336,10 @@ class WebInterface(object): result = pms_connect.get_current_activity() if result: - return serve_template(templatename="current_activity.html", data=result) + return serve_template(template_name="current_activity.html", data=result) else: logger.warn("Unable to retrieve data for get_current_activity.") - return serve_template(templatename="current_activity.html", data=None) + return serve_template(template_name="current_activity.html", data=None) @cherrypy.expose @requireAuth() @@ -350,9 +350,9 @@ class WebInterface(object): if result: session = next((s for s in result['sessions'] if s['session_key'] == session_key), None) - return serve_template(templatename="current_activity_instance.html", session=session) + return serve_template(template_name="current_activity_instance.html", session=session) else: - return serve_template(templatename="current_activity_instance.html", session=None) + return serve_template(template_name="current_activity_instance.html", session=None) @cherrypy.expose @cherrypy.tools.json_out() @@ -395,7 +395,7 @@ class WebInterface(object): endpoint = endpoint.format(machine_id=plexpy.CONFIG.PMS_IDENTIFIER) url = base_url + endpoint + ('?' 
+ urlencode(kwargs) if kwargs else '') - return serve_template(templatename="xml_shortcut.html", title="Plex XML", url=url) + return serve_template(template_name="xml_shortcut.html", title="Plex XML", url=url) @cherrypy.expose @requireAuth() @@ -405,7 +405,7 @@ class WebInterface(object): stats_type=stats_type, stats_count=stats_count) - return serve_template(templatename="home_stats.html", title="Stats", data=stats_data) + return serve_template(template_name="home_stats.html", title="Stats", data=stats_data) @cherrypy.expose @requireAuth() @@ -416,7 +416,7 @@ class WebInterface(object): stats_data = data_factory.get_library_stats(library_cards=library_cards) - return serve_template(templatename="library_stats.html", title="Library Stats", data=stats_data) + return serve_template(template_name="library_stats.html", title="Library Stats", data=stats_data) @cherrypy.expose @requireAuth() @@ -426,13 +426,13 @@ class WebInterface(object): pms_connect = pmsconnect.PmsConnect() result = pms_connect.get_recently_added_details(count=count, media_type=media_type) except IOError as e: - return serve_template(templatename="recently_added.html", data=None) + return serve_template(template_name="recently_added.html", data=None) if result and 'recently_added' in result: - return serve_template(templatename="recently_added.html", data=result['recently_added']) + return serve_template(template_name="recently_added.html", data=result['recently_added']) else: logger.warn("Unable to retrieve data for get_recently_added.") - return serve_template(templatename="recently_added.html", data=None) + return serve_template(template_name="recently_added.html", data=None) @cherrypy.expose @cherrypy.tools.json_out() @@ -467,7 +467,7 @@ class WebInterface(object): @cherrypy.expose @requireAuth() def libraries(self, **kwargs): - return serve_template(templatename="libraries.html", title="Libraries") + return serve_template(template_name="libraries.html", title="Libraries") @cherrypy.expose @cherrypy.tools.json_out() @@ -620,12 +620,12 @@ class WebInterface(object): library_details = library_data.get_details(section_id=section_id) except: logger.warn("Unable to retrieve library details for section_id %s " % section_id) - return serve_template(templatename="library.html", title="Library", data=None, config=config) + return serve_template(template_name="library.html", title="Library", data=None, config=config) else: logger.debug("Library page requested but no section_id received.") - return serve_template(templatename="library.html", title="Library", data=None, config=config) + return serve_template(template_name="library.html", title="Library", data=None, config=config) - return serve_template(templatename="library.html", title="Library", data=library_details, config=config) + return serve_template(template_name="library.html", title="Library", data=library_details, config=config) @cherrypy.expose @requireAuth(member_of("admin")) @@ -638,7 +638,7 @@ class WebInterface(object): result = None status_message = 'An error occured.' 
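
A side note on the TemplateLookup change in patch 016 above: building a fresh TemplateLookup on every request discards mako's compiled-template cache, which is what the module-level TEMPLATE_LOOKUP global restores (the commit references mako issue sqlalchemy/mako#378). A minimal sketch of the same lazy one-time initialization, assuming the template directory cannot change while the server is running; the names get_lookup, _LOOKUP, and template_dir are illustrative, not Tautulli's:

    from mako.lookup import TemplateLookup

    _LOOKUP = None  # filled in on first use, then shared by every request

    def get_lookup(template_dir):
        # Construct the lookup once; reusing it lets mako serve later
        # requests from its compiled-template cache instead of recompiling
        # every template per request.
        global _LOOKUP
        if _LOOKUP is None:
            _LOOKUP = TemplateLookup(directories=[template_dir],
                                     default_filters=['unicode', 'h'])
        return _LOOKUP
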
- return serve_template(templatename="edit_library.html", title="Edit Library", + return serve_template(template_name="edit_library.html", title="Edit Library", data=result, server_id=plexpy.CONFIG.PMS_IDENTIFIER, status_message=status_message) @cherrypy.expose @@ -685,7 +685,7 @@ class WebInterface(object): @requireAuth() def library_watch_time_stats(self, section_id=None, **kwargs): if not allow_session_library(section_id): - return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats") + return serve_template(template_name="user_watch_time_stats.html", data=None, title="Watch Stats") if section_id: library_data = libraries.Libraries() @@ -694,16 +694,16 @@ class WebInterface(object): result = None if result: - return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats") + return serve_template(template_name="user_watch_time_stats.html", data=result, title="Watch Stats") else: logger.warn("Unable to retrieve data for library_watch_time_stats.") - return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats") + return serve_template(template_name="user_watch_time_stats.html", data=None, title="Watch Stats") @cherrypy.expose @requireAuth() def library_user_stats(self, section_id=None, **kwargs): if not allow_session_library(section_id): - return serve_template(templatename="library_user_stats.html", data=None, title="Player Stats") + return serve_template(template_name="library_user_stats.html", data=None, title="Player Stats") if section_id: library_data = libraries.Libraries() @@ -712,16 +712,16 @@ class WebInterface(object): result = None if result: - return serve_template(templatename="library_user_stats.html", data=result, title="Player Stats") + return serve_template(template_name="library_user_stats.html", data=result, title="Player Stats") else: logger.warn("Unable to retrieve data for library_user_stats.") - return serve_template(templatename="library_user_stats.html", data=None, title="Player Stats") + return serve_template(template_name="library_user_stats.html", data=None, title="Player Stats") @cherrypy.expose @requireAuth() def library_recently_watched(self, section_id=None, limit='10', **kwargs): if not allow_session_library(section_id): - return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched") + return serve_template(template_name="user_recently_watched.html", data=None, title="Recently Watched") if section_id: library_data = libraries.Libraries() @@ -730,16 +730,16 @@ class WebInterface(object): result = None if result: - return serve_template(templatename="user_recently_watched.html", data=result, title="Recently Watched") + return serve_template(template_name="user_recently_watched.html", data=result, title="Recently Watched") else: logger.warn("Unable to retrieve data for library_recently_watched.") - return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched") + return serve_template(template_name="user_recently_watched.html", data=None, title="Recently Watched") @cherrypy.expose @requireAuth() def library_recently_added(self, section_id=None, limit='10', **kwargs): if not allow_session_library(section_id): - return serve_template(templatename="library_recently_added.html", data=None, title="Recently Added") + return serve_template(template_name="library_recently_added.html", data=None, title="Recently Added") if section_id: pms_connect = pmsconnect.PmsConnect() @@ -748,10 
+748,10 @@ class WebInterface(object): result = None if result and result['recently_added']: - return serve_template(templatename="library_recently_added.html", data=result['recently_added'], title="Recently Added") + return serve_template(template_name="library_recently_added.html", data=result['recently_added'], title="Recently Added") else: logger.warn("Unable to retrieve data for library_recently_added.") - return serve_template(templatename="library_recently_added.html", data=None, title="Recently Added") + return serve_template(template_name="library_recently_added.html", data=None, title="Recently Added") @cherrypy.expose @cherrypy.tools.json_out() @@ -1243,7 +1243,7 @@ class WebInterface(object): @cherrypy.expose @requireAuth() def users(self, **kwargs): - return serve_template(templatename="users.html", title="Users") + return serve_template(template_name="users.html", title="Users") @cherrypy.expose @cherrypy.tools.json_out() @@ -1359,12 +1359,12 @@ class WebInterface(object): user_details = user_data.get_details(user_id=user_id) except: logger.warn("Unable to retrieve user details for user_id %s " % user_id) - return serve_template(templatename="user.html", title="User", data=None) + return serve_template(template_name="user.html", title="User", data=None) else: logger.debug("User page requested but no user_id received.") - return serve_template(templatename="user.html", title="User", data=None) + return serve_template(template_name="user.html", title="User", data=None) - return serve_template(templatename="user.html", title="User", data=user_details) + return serve_template(template_name="user.html", title="User", data=user_details) @cherrypy.expose @requireAuth(member_of("admin")) @@ -1377,7 +1377,7 @@ class WebInterface(object): result = None status_message = 'An error occured.' 
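
The templatename to template_name rename in patch 017 has to touch every keyword call site by hand, and patch 018 further down shows one that slipped through (the login page), the kind of miss that only surfaces as a TypeError at request time. A purely hypothetical transitional shim, not part of these patches, that would let stale call sites degrade to a warning during such a rename:

    import warnings

    def serve_template(template_name=None, templatename=None, **kwargs):
        # 'templatename' is the deprecated spelling, accepted temporarily
        # so call sites missed by the rename keep working.
        if template_name is None and templatename is not None:
            warnings.warn("'templatename' is deprecated; use 'template_name'",
                          DeprecationWarning, stacklevel=2)
            template_name = templatename
        if template_name is None:
            raise TypeError("serve_template() requires 'template_name'")
        return template_name, kwargs  # placeholder: the real code renders here
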
- return serve_template(templatename="edit_user.html", title="Edit User", data=result, status_message=status_message) + return serve_template(template_name="edit_user.html", title="Edit User", data=result, status_message=status_message) @cherrypy.expose @requireAuth(member_of("admin")) @@ -1425,7 +1425,7 @@ class WebInterface(object): @requireAuth() def user_watch_time_stats(self, user=None, user_id=None, **kwargs): if not allow_session_user(user_id): - return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats") + return serve_template(template_name="user_watch_time_stats.html", data=None, title="Watch Stats") if user_id or user: user_data = users.Users() @@ -1434,16 +1434,16 @@ class WebInterface(object): result = None if result: - return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats") + return serve_template(template_name="user_watch_time_stats.html", data=result, title="Watch Stats") else: logger.warn("Unable to retrieve data for user_watch_time_stats.") - return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats") + return serve_template(template_name="user_watch_time_stats.html", data=None, title="Watch Stats") @cherrypy.expose @requireAuth() def user_player_stats(self, user=None, user_id=None, **kwargs): if not allow_session_user(user_id): - return serve_template(templatename="user_player_stats.html", data=None, title="Player Stats") + return serve_template(template_name="user_player_stats.html", data=None, title="Player Stats") if user_id or user: user_data = users.Users() @@ -1452,16 +1452,16 @@ class WebInterface(object): result = None if result: - return serve_template(templatename="user_player_stats.html", data=result, title="Player Stats") + return serve_template(template_name="user_player_stats.html", data=result, title="Player Stats") else: logger.warn("Unable to retrieve data for user_player_stats.") - return serve_template(templatename="user_player_stats.html", data=None, title="Player Stats") + return serve_template(template_name="user_player_stats.html", data=None, title="Player Stats") @cherrypy.expose @requireAuth() def get_user_recently_watched(self, user=None, user_id=None, limit='10', **kwargs): if not allow_session_user(user_id): - return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched") + return serve_template(template_name="user_recently_watched.html", data=None, title="Recently Watched") if user_id or user: user_data = users.Users() @@ -1470,10 +1470,10 @@ class WebInterface(object): result = None if result: - return serve_template(templatename="user_recently_watched.html", data=result, title="Recently Watched") + return serve_template(template_name="user_recently_watched.html", data=result, title="Recently Watched") else: logger.warn("Unable to retrieve data for get_user_recently_watched.") - return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched") + return serve_template(template_name="user_recently_watched.html", data=None, title="Recently Watched") @cherrypy.expose @cherrypy.tools.json_out() @@ -1879,7 +1879,7 @@ class WebInterface(object): "database_is_importing": database.IS_IMPORTING, } - return serve_template(templatename="history.html", title="History", config=config) + return serve_template(template_name="history.html", title="History", config=config) @cherrypy.expose @cherrypy.tools.json_out() @@ -2067,7 +2067,7 @@ class WebInterface(object): 
data_factory = datafactory.DataFactory() stream_data = data_factory.get_stream_details(row_id, session_key) - return serve_template(templatename="stream_data.html", title="Stream Data", data=stream_data, user=user) + return serve_template(template_name="stream_data.html", title="Stream Data", data=stream_data, user=user) @cherrypy.expose @cherrypy.tools.json_out() @@ -2158,7 +2158,7 @@ class WebInterface(object): public = helpers.is_public_ip(ip_address) - return serve_template(templatename="ip_address_modal.html", title="IP Address Details", + return serve_template(template_name="ip_address_modal.html", title="IP Address Details", data=ip_address, public=public, kwargs=kwargs) @cherrypy.expose @@ -2197,7 +2197,7 @@ class WebInterface(object): @cherrypy.expose @requireAuth() def graphs(self, **kwargs): - return serve_template(templatename="graphs.html", title="Graphs") + return serve_template(template_name="graphs.html", title="Graphs") @cherrypy.expose @cherrypy.tools.json_out() @@ -2711,9 +2711,9 @@ class WebInterface(object): @requireAuth() def history_table_modal(self, **kwargs): if kwargs.get('user_id') and not allow_session_user(kwargs['user_id']): - return serve_template(templatename="history_table_modal.html", title="History Data", data=None) + return serve_template(template_name="history_table_modal.html", title="History Data", data=None) - return serve_template(templatename="history_table_modal.html", title="History Data", data=kwargs) + return serve_template(template_name="history_table_modal.html", title="History Data", data=kwargs) ##### Sync ##### @@ -2721,7 +2721,7 @@ class WebInterface(object): @cherrypy.expose @requireAuth() def sync(self, **kwargs): - return serve_template(templatename="sync.html", title="Synced Items") + return serve_template(template_name="sync.html", title="Synced Items") @cherrypy.expose @cherrypy.tools.json_out() @@ -2780,7 +2780,7 @@ class WebInterface(object): @requireAuth(member_of("admin")) def logs(self, **kwargs): plex_log_files = log_reader.list_plex_logs() - return serve_template(templatename="logs.html", title="Log", plex_log_files=plex_log_files) + return serve_template(template_name="logs.html", title="Log", plex_log_files=plex_log_files) @cherrypy.expose @requireAuth(member_of("admin")) @@ -3174,7 +3174,7 @@ class WebInterface(object): for key in ('home_sections', 'home_stats_cards', 'home_library_cards'): settings_dict[key] = json.dumps(settings_dict[key]) - return serve_template(templatename="settings.html", title="Settings", config=settings_dict) + return serve_template(template_name="settings.html", title="Settings", config=settings_dict) @cherrypy.expose @cherrypy.tools.json_out() @@ -3365,17 +3365,17 @@ class WebInterface(object): @cherrypy.expose @requireAuth(member_of("admin")) def get_configuration_table(self, **kwargs): - return serve_template(templatename="configuration_table.html") + return serve_template(template_name="configuration_table.html") @cherrypy.expose @requireAuth(member_of("admin")) def get_scheduler_table(self, **kwargs): - return serve_template(templatename="scheduler_table.html") + return serve_template(template_name="scheduler_table.html") @cherrypy.expose @requireAuth(member_of("admin")) def get_queue_modal(self, queue=None, **kwargs): - return serve_template(templatename="queue_modal.html", queue=queue) + return serve_template(template_name="queue_modal.html", queue=queue) @cherrypy.expose @cherrypy.tools.json_out() @@ -3439,7 +3439,7 @@ class WebInterface(object): @requireAuth(member_of("admin")) def 
get_notifiers_table(self, **kwargs): result = notifiers.get_notifiers() - return serve_template(templatename="notifiers_table.html", notifiers_list=result) + return serve_template(template_name="notifiers_table.html", notifiers_list=result) @cherrypy.expose @cherrypy.tools.json_out() @@ -3522,7 +3522,7 @@ class WebInterface(object): for category in common.NOTIFICATION_PARAMETERS for param in category['parameters'] ] - return serve_template(templatename="notifier_config.html", notifier=result, parameters=parameters) + return serve_template(template_name="notifier_config.html", notifier=result, parameters=parameters) @cherrypy.expose @cherrypy.tools.json_out() @@ -3605,7 +3605,7 @@ class WebInterface(object): text.append({'media_type': media_type, 'subject': test_subject, 'body': test_body}) - return serve_template(templatename="notifier_text_preview.html", text=text, agent=agent_name) + return serve_template(template_name="notifier_text_preview.html", text=text, agent=agent_name) @cherrypy.expose @cherrypy.tools.json_out() @@ -3783,7 +3783,7 @@ class WebInterface(object): @requireAuth(member_of("admin")) def get_mobile_devices_table(self, **kwargs): result = mobile_app.get_mobile_devices() - return serve_template(templatename="mobile_devices_table.html", devices_list=result) + return serve_template(template_name="mobile_devices_table.html", devices_list=result) @cherrypy.expose @cherrypy.tools.json_out() @@ -3806,7 +3806,7 @@ class WebInterface(object): def get_mobile_device_config_modal(self, mobile_device_id=None, **kwargs): result = mobile_app.get_mobile_device_config(mobile_device_id=mobile_device_id) - return serve_template(templatename="mobile_device_config.html", device=result) + return serve_template(template_name="mobile_device_config.html", device=result) @cherrypy.expose @cherrypy.tools.json_out() @@ -4016,11 +4016,11 @@ class WebInterface(object): @requireAuth(member_of("admin")) def import_database_tool(self, app=None, **kwargs): if app == 'tautulli': - return serve_template(templatename="app_import.html", title="Import Tautulli Database", app="Tautulli") + return serve_template(template_name="app_import.html", title="Import Tautulli Database", app="Tautulli") elif app == 'plexwatch': - return serve_template(templatename="app_import.html", title="Import PlexWatch Database", app="PlexWatch") + return serve_template(template_name="app_import.html", title="Import PlexWatch Database", app="PlexWatch") elif app == 'plexivity': - return serve_template(templatename="app_import.html", title="Import Plexivity Database", app="Plexivity") + return serve_template(template_name="app_import.html", title="Import Plexivity Database", app="Plexivity") logger.warn("No app specified for import.") return @@ -4028,7 +4028,7 @@ class WebInterface(object): @cherrypy.expose @requireAuth(member_of("admin")) def import_config_tool(self, **kwargs): - return serve_template(templatename="config_import.html", title="Import Tautulli Configuration") + return serve_template(template_name="config_import.html", title="Import Tautulli Configuration") @cherrypy.expose @cherrypy.tools.json_out() @@ -4301,7 +4301,7 @@ class WebInterface(object): else: new_http_root = '/' - return serve_template(templatename="shutdown.html", signal=signal, title=title, + return serve_template(template_name="shutdown.html", signal=signal, title=title, new_http_root=new_http_root, message=message, timer=timer, quote=quote) @cherrypy.expose @@ -4411,7 +4411,7 @@ class WebInterface(object): if metadata['section_id'] and not 
allow_session_library(metadata['section_id']): raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT) - return serve_template(templatename="info.html", metadata=metadata, title="Info", + return serve_template(template_name="info.html", metadata=metadata, title="Info", config=config, source=source, user_info=user_info) else: if get_session_user_id(): @@ -4427,11 +4427,11 @@ class WebInterface(object): result = pms_connect.get_item_children(rating_key=rating_key, media_type=media_type) if result: - return serve_template(templatename="info_children_list.html", data=result, + return serve_template(template_name="info_children_list.html", data=result, media_type=media_type, title="Children List") else: logger.warn("Unable to retrieve data for get_item_children.") - return serve_template(templatename="info_children_list.html", data=None, title="Children List") + return serve_template(template_name="info_children_list.html", data=None, title="Children List") @cherrypy.expose @requireAuth() @@ -4441,9 +4441,9 @@ class WebInterface(object): result = pms_connect.get_item_children_related(rating_key=rating_key) if result: - return serve_template(templatename="info_collection_list.html", data=result, title=title) + return serve_template(template_name="info_collection_list.html", data=result, title=title) else: - return serve_template(templatename="info_collection_list.html", data=None, title=title) + return serve_template(template_name="info_collection_list.html", data=None, title=title) @cherrypy.expose @requireAuth() @@ -4455,10 +4455,10 @@ class WebInterface(object): result = None if result: - return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats") + return serve_template(template_name="user_watch_time_stats.html", data=result, title="Watch Stats") else: logger.warn("Unable to retrieve data for item_watch_time_stats.") - return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats") + return serve_template(template_name="user_watch_time_stats.html", data=None, title="Watch Stats") @cherrypy.expose @requireAuth() @@ -4470,10 +4470,10 @@ class WebInterface(object): result = None if result: - return serve_template(templatename="library_user_stats.html", data=result, title="Player Stats") + return serve_template(template_name="library_user_stats.html", data=result, title="Player Stats") else: logger.warn("Unable to retrieve data for item_user_stats.") - return serve_template(templatename="library_user_stats.html", data=None, title="Player Stats") + return serve_template(template_name="library_user_stats.html", data=None, title="Player Stats") @cherrypy.expose @cherrypy.tools.json_out() @@ -5095,7 +5095,7 @@ class WebInterface(object): @cherrypy.expose @requireAuth() def search(self, query='', **kwargs): - return serve_template(templatename="search.html", title="Search", query=query) + return serve_template(template_name="search.html", title="Search", query=query) @cherrypy.expose @cherrypy.tools.json_out() @@ -5152,10 +5152,10 @@ class WebInterface(object): if season['media_index'] == season_index] if result: - return serve_template(templatename="info_search_results_list.html", data=result, title="Search Result List") + return serve_template(template_name="info_search_results_list.html", data=result, title="Search Result List") else: logger.warn("Unable to retrieve data for get_search_results_children.") - return serve_template(templatename="info_search_results_list.html", data=None, title="Search Result List") + return 
serve_template(template_name="info_search_results_list.html", data=None, title="Search Result List") ##### Update Metadata ##### @@ -5172,10 +5172,10 @@ class WebInterface(object): query['query_string'] = query_string if query: - return serve_template(templatename="update_metadata.html", query=query, update=update, title="Info") + return serve_template(template_name="update_metadata.html", query=query, update=update, title="Info") else: logger.warn("Unable to retrieve data for update_metadata.") - return serve_template(templatename="update_metadata.html", query=query, update=update, title="Info") + return serve_template(template_name="update_metadata.html", query=query, update=update, title="Info") @cherrypy.expose @cherrypy.tools.json_out() @@ -6578,7 +6578,7 @@ class WebInterface(object): @requireAuth(member_of("admin")) def get_newsletters_table(self, **kwargs): result = newsletters.get_newsletters() - return serve_template(templatename="newsletters_table.html", newsletters_list=result) + return serve_template(template_name="newsletters_table.html", newsletters_list=result) @cherrypy.expose @cherrypy.tools.json_out() @@ -6653,7 +6653,7 @@ class WebInterface(object): @requireAuth(member_of("admin")) def get_newsletter_config_modal(self, newsletter_id=None, **kwargs): result = newsletters.get_newsletter_config(newsletter_id=newsletter_id, mask_passwords=True) - return serve_template(templatename="newsletter_config.html", newsletter=result) + return serve_template(template_name="newsletter_config.html", newsletter=result) @cherrypy.expose @cherrypy.tools.json_out() @@ -6761,7 +6761,7 @@ class WebInterface(object): elif kwargs.pop('key', None) == plexpy.CONFIG.NEWSLETTER_PASSWORD: return self.newsletter_auth(*args, **kwargs) else: - return serve_template(templatename="newsletter_auth.html", + return serve_template(template_name="newsletter_auth.html", title="Newsletter Login", uri=request_uri) @@ -6798,7 +6798,7 @@ class WebInterface(object): @requireAuth(member_of("admin")) def newsletter_preview(self, **kwargs): kwargs['preview'] = 'true' - return serve_template(templatename="newsletter_preview.html", + return serve_template(template_name="newsletter_preview.html", title="Newsletter", kwargs=kwargs) @@ -6837,7 +6837,7 @@ class WebInterface(object): @cherrypy.expose @requireAuth(member_of("admin")) def support(self, **kwargs): - return serve_template(templatename="support.html", title="Support") + return serve_template(template_name="support.html", title="Support") @cherrypy.expose @cherrypy.tools.json_out() @@ -6992,7 +6992,7 @@ class WebInterface(object): if media_type == 'photo_album': media_type = 'photoalbum' - return serve_template(templatename="export_modal.html", title="Export Metadata", + return serve_template(template_name="export_modal.html", title="Export Metadata", section_id=section_id, user_id=user_id, rating_key=rating_key, media_type=media_type, sub_media_type=sub_media_type, export_type=export_type, file_formats=file_formats) From c761e6e8d09491886bcfc8b056da0abcb70b401d Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 29 Jun 2023 00:07:43 -0700 Subject: [PATCH 018/361] Fix `template_name` argument for login page --- plexpy/webauth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plexpy/webauth.py b/plexpy/webauth.py index d105a8c2..5487f2ea 100644 --- a/plexpy/webauth.py +++ b/plexpy/webauth.py @@ -314,7 +314,7 @@ class AuthController(object): def get_loginform(self, redirect_uri=''): from 
plexpy.webserve import serve_template - return serve_template(templatename="login.html", title="Login", redirect_uri=unquote(redirect_uri)) + return serve_template(template_name="login.html", title="Login", redirect_uri=unquote(redirect_uri)) @cherrypy.expose def index(self, *args, **kwargs): From 085cfa4bef90e7c3847a51789f2c18ed174c5bee Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 7 Jul 2023 11:51:54 -0700 Subject: [PATCH 019/361] Fix history grouping incorrect for watched content --- plexpy/activity_processor.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 588e91ce..2d752104 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -1,4 +1,4 @@ -# This file is part of Tautulli. +# This file is part of Tautulli. # # Tautulli is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -327,7 +327,7 @@ class ActivityProcessor(object): # Get the last insert row id last_id = self.db.last_insert_id() new_session = prev_session = None - watched = False + prev_watched = None if session['live']: # Check if we should group the session, select the last guid from the user @@ -370,24 +370,25 @@ class ActivityProcessor(object): 'reference_id': result[1]['reference_id']} marker_first, marker_final = helpers.get_first_final_marker(metadata['markers']) - watched = helpers.check_watched( - session['media_type'], session['view_offset'], session['duration'], + prev_watched = helpers.check_watched( + session['media_type'], prev_session['view_offset'], session['duration'], marker_first, marker_final ) query = "UPDATE session_history SET reference_id = ? WHERE id = ? " - # If previous session view offset less than watched percent, + # If previous session view offset less than watched threshold, # and new session view offset is greater, # then set the reference_id to the previous row, # else set the reference_id to the new id - if prev_session is None and new_session is None: - args = [last_id, last_id] - elif watched and prev_session['view_offset'] <= new_session['view_offset'] or \ - session['live'] and prev_session['guid'] == new_session['guid']: + if (prev_watched is False and prev_session['view_offset'] <= new_session['view_offset'] or + session['live'] and prev_session['guid'] == new_session['guid']): + logger.debug("Tautulli ActivityProcessor :: Grouping history for sessionKey %s", session['session_key']) args = [prev_session['reference_id'], new_session['id']] + else: - args = [new_session['id'], new_session['id']] + logger.debug("Tautulli ActivityProcessor :: Not grouping history for sessionKey %s", session['session_key']) + args = [last_id, last_id] self.db.action(query=query, args=args) From 1fe6d1505f5524c2706934093b77034b00c94374 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 7 Jul 2023 13:02:45 -0700 Subject: [PATCH 020/361] Add method to regroup history --- plexpy/activity_processor.py | 159 ++++++++++++++++++++--------------- 1 file changed, 93 insertions(+), 66 deletions(-) diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 2d752104..6851263b 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -1,4 +1,4 @@ -# This file is part of Tautulli. +# This file is part of Tautulli. 
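
Patch 019 above changes which offset feeds the watched check: the old code passed the new session's view offset to check_watched, so a previous play that had already crossed the watched threshold could still absorb a later rewatch into its group. Restated as a simplified sketch, using a bare percentage threshold and ignoring the credits-marker branch; should_group and the 0.85 default are illustrative, the real code calls helpers.check_watched:

    def should_group(prev, new, duration, live=False, watched_pct=0.85):
        # Group the new history row under the previous row's reference_id
        # only when the previous play was NOT already watched and playback
        # moved forward; live TV groups on a matching guid instead.
        if live:
            return prev['guid'] == new['guid']
        prev_watched = (prev['view_offset'] / duration) >= watched_pct
        return not prev_watched and prev['view_offset'] <= new['view_offset']

    # A play stopped at 30% groups with a resume that reaches 60%:
    assert should_group({'view_offset': 30}, {'view_offset': 60}, duration=100)
    # A play that already crossed the threshold starts a new group instead:
    assert not should_group({'view_offset': 90}, {'view_offset': 10}, duration=100)

The group_history method extracted in patch 020 below applies this same decision retroactively, row by row, which is what makes the one-shot regroup of existing history possible.
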
# # Tautulli is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -326,71 +326,7 @@ class ActivityProcessor(object): # Get the last insert row id last_id = self.db.last_insert_id() - new_session = prev_session = None - prev_watched = None - - if session['live']: - # Check if we should group the session, select the last guid from the user - query = "SELECT session_history.id, session_history_metadata.guid, session_history.reference_id " \ - "FROM session_history " \ - "JOIN session_history_metadata ON session_history.id == session_history_metadata.id " \ - "WHERE session_history.user_id = ? ORDER BY session_history.id DESC LIMIT 1 " - - args = [session['user_id']] - - result = self.db.select(query=query, args=args) - - if len(result) > 0: - new_session = {'id': last_id, - 'guid': metadata['guid'], - 'reference_id': last_id} - - prev_session = {'id': result[0]['id'], - 'guid': result[0]['guid'], - 'reference_id': result[0]['reference_id']} - - else: - # Check if we should group the session, select the last two rows from the user - query = "SELECT id, rating_key, view_offset, reference_id FROM session_history " \ - "WHERE user_id = ? AND rating_key = ? ORDER BY id DESC LIMIT 2 " - - args = [session['user_id'], session['rating_key']] - - result = self.db.select(query=query, args=args) - - if len(result) > 1: - new_session = {'id': result[0]['id'], - 'rating_key': result[0]['rating_key'], - 'view_offset': result[0]['view_offset'], - 'reference_id': result[0]['reference_id']} - - prev_session = {'id': result[1]['id'], - 'rating_key': result[1]['rating_key'], - 'view_offset': result[1]['view_offset'], - 'reference_id': result[1]['reference_id']} - - marker_first, marker_final = helpers.get_first_final_marker(metadata['markers']) - prev_watched = helpers.check_watched( - session['media_type'], prev_session['view_offset'], session['duration'], - marker_first, marker_final - ) - - query = "UPDATE session_history SET reference_id = ? WHERE id = ? 
" - - # If previous session view offset less than watched threshold, - # and new session view offset is greater, - # then set the reference_id to the previous row, - # else set the reference_id to the new id - if (prev_watched is False and prev_session['view_offset'] <= new_session['view_offset'] or - session['live'] and prev_session['guid'] == new_session['guid']): - logger.debug("Tautulli ActivityProcessor :: Grouping history for sessionKey %s", session['session_key']) - args = [prev_session['reference_id'], new_session['id']] - - else: - logger.debug("Tautulli ActivityProcessor :: Not grouping history for sessionKey %s", session['session_key']) - args = [last_id, last_id] - - self.db.action(query=query, args=args) + self.group_history(last_id, session, metadata) # logger.debug("Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s" # % last_id) @@ -547,6 +483,80 @@ class ActivityProcessor(object): # Return the session row id when the session is successfully written to the database return session['id'] + def group_history(self, last_id, session, metadata=None): + new_session = prev_session = None + prev_watched = None + + if session['live']: + # Check if we should group the session, select the last guid from the user + query = "SELECT session_history.id, session_history_metadata.guid, session_history.reference_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history.id == session_history_metadata.id " \ + "WHERE session_history.id <= ? AND session_history.user_id = ? ORDER BY session_history.id DESC LIMIT 1 " + + args = [last_id, session['user_id']] + + result = self.db.select(query=query, args=args) + + if len(result) > 0: + new_session = {'id': last_id, + 'guid': metadata['guid'] if metadata else session['guid'], + 'reference_id': last_id} + + prev_session = {'id': result[0]['id'], + 'guid': result[0]['guid'], + 'reference_id': result[0]['reference_id']} + + else: + # Check if we should group the session, select the last two rows from the user + query = "SELECT id, rating_key, view_offset, reference_id FROM session_history " \ + "WHERE id <= ? AND user_id = ? AND rating_key = ? ORDER BY id DESC LIMIT 2 " + + args = [last_id, session['user_id'], session['rating_key']] + + result = self.db.select(query=query, args=args) + + if len(result) > 1: + new_session = {'id': result[0]['id'], + 'rating_key': result[0]['rating_key'], + 'view_offset': result[0]['view_offset'], + 'reference_id': result[0]['reference_id']} + + prev_session = {'id': result[1]['id'], + 'rating_key': result[1]['rating_key'], + 'view_offset': result[1]['view_offset'], + 'reference_id': result[1]['reference_id']} + + if metadata: + marker_first, marker_final = helpers.get_first_final_marker(metadata['markers']) + else: + marker_first = session['marker_credits_first'] + marker_final = session['marker_credits_final'] + + prev_watched = helpers.check_watched( + session['media_type'], prev_session['view_offset'], session['duration'], + marker_first, marker_final + ) + + query = "UPDATE session_history SET reference_id = ? WHERE id = ? 
" + + # If previous session view offset less than watched threshold, + # and new session view offset is greater, + # then set the reference_id to the previous row, + # else set the reference_id to the new id + if (prev_watched is False and prev_session['view_offset'] <= new_session['view_offset'] or + session['live'] and prev_session['guid'] == new_session['guid']): + if metadata: + logger.debug("Tautulli ActivityProcessor :: Grouping history for sessionKey %s", session['session_key']) + args = [prev_session['reference_id'], new_session['id']] + + else: + if metadata: + logger.debug("Tautulli ActivityProcessor :: Not grouping history for sessionKey %s", session['session_key']) + args = [last_id, last_id] + + self.db.action(query=query, args=args) + def get_sessions(self, user_id=None, ip_address=None): query = "SELECT * FROM sessions" args = [] @@ -696,3 +706,20 @@ class ActivityProcessor(object): "ORDER BY stopped DESC", [user_id, machine_id, media_type]) return int(started - last_session.get('stopped', 0) >= plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD) + + def regroup_history(self): + logger.info("Tautulli ActivityProcessor :: Creating database backup...") + database.make_backup() + + logger.info("Tautulli ActivityProcessor :: Regrouping session history...") + + query = ( + "SELECT * FROM session_history " + "JOIN session_history_metadata ON session_history.id = session_history_metadata.id" + ) + results = self.db.select(query) + + for session in results: + self.group_history(session['id'], session) + + logger.info("Tautulli ActivityProcessor :: Regrouping session history complete.") From b144e6527fabb13cad9eb90bac49985f8dc89ac8 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 7 Jul 2023 13:26:11 -0700 Subject: [PATCH 021/361] Add button to regroup play history --- data/interfaces/default/settings.html | 31 +++++++++++++++++++++------ plexpy/activity_processor.py | 10 +++++++-- plexpy/webserve.py | 16 ++++++++++++++ 3 files changed, 49 insertions(+), 8 deletions(-) diff --git a/data/interfaces/default/settings.html b/data/interfaces/default/settings.html index fd234da2..7cd614e0 100644 --- a/data/interfaces/default/settings.html +++ b/data/interfaces/default/settings.html @@ -132,12 +132,6 @@

Change the "Play by day of week" graph to start on Monday. Default is start on Sunday.

- [markup stripped in extraction: this hunk removes the play history grouping checkbox from its old position]
- Group play history for the same item and user as a single entry when progress is less than the watched percent.

 Decide whether to use end credits markers to determine the 'watched' state of video items. When markers are not available the selected threshold percentage will be used.

+ [markup stripped in extraction: the same grouping checkbox is re-added below the watched threshold setting]
+ Group play history for the same item and user as a single entry when progress is less than the watched percent.
+
+ [markup stripped in extraction: adds a button (id "regroup_history") to regroup play history]
+ Fix grouping of play history in the database.

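
Since the regroup_history handler in patch 021 is decorated with @addtoapi(), the regroup should also be reachable through Tautulli's v2 API, not only the settings button wired up below. A hedged usage sketch: the host, port, and API key are placeholders, and it assumes the third-party requests package:

    import requests

    params = {'apikey': 'YOUR_API_KEY', 'cmd': 'regroup_history'}
    r = requests.get('http://localhost:8181/api/v2', params=params)
    print(r.json())
    # The handler itself reports one of:
    #   {'result': 'success', 'message': 'Regrouped play history.'}
    #   {'result': 'error', 'message': 'Regrouping play history failed.'}

Note that the operation first takes a database backup and then re-evaluates every row in session_history, so it can take a while on a large database.
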
@@ -2484,6 +2497,12 @@ $(document).ready(function() { confirmAjaxCall(url, msg); }); + $("#regroup_history").click(function () { + var msg = 'Are you sure you want to regroup play history in the database?'; + var url = 'regroup_history'; + confirmAjaxCall(url, msg, null, 'Regrouping play history...'); + }); + $("#delete_temp_sessions").click(function () { var msg = 'Are you sure you want to flush the temporary sessions?

This will reset all currently active sessions.'; var url = 'delete_temp_sessions'; diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 6851263b..b1558a56 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -709,7 +709,8 @@ class ActivityProcessor(object): def regroup_history(self): logger.info("Tautulli ActivityProcessor :: Creating database backup...") - database.make_backup() + if not database.make_backup(): + return False logger.info("Tautulli ActivityProcessor :: Regrouping session history...") @@ -720,6 +721,11 @@ class ActivityProcessor(object): results = self.db.select(query) for session in results: - self.group_history(session['id'], session) + try: + self.group_history(session['id'], session) + except Exception as e: + logger.error("Tautulli ActivityProcessor :: Error regrouping session history: %s", e) + return False logger.info("Tautulli ActivityProcessor :: Regrouping session history complete.") + return True diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 72eb9170..06a9a802 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -51,6 +51,7 @@ if sys.version_info >= (3, 6): import plexpy if plexpy.PYTHON2: import activity_pinger + import activity_processor import common import config import database @@ -85,6 +86,7 @@ if plexpy.PYTHON2: import macos else: from plexpy import activity_pinger + from plexpy import activity_processor from plexpy import common from plexpy import config from plexpy import database @@ -434,6 +436,20 @@ class WebInterface(object): logger.warn("Unable to retrieve data for get_recently_added.") return serve_template(template_name="recently_added.html", data=None) + @cherrypy.expose + @cherrypy.tools.json_out() + @requireAuth(member_of("admin")) + @addtoapi() + def regroup_history(self, **kwargs): + """ Regroup play history in the database.""" + + result = activity_processor.ActivityProcessor().regroup_history() + + if result: + return {'result': 'success', 'message': 'Regrouped play history.'} + else: + return {'result': 'error', 'message': 'Regrouping play history failed.'} + @cherrypy.expose @cherrypy.tools.json_out() @requireAuth(member_of("admin")) From 343a3e928169843d458b435a81637b736b5261e6 Mon Sep 17 00:00:00 2001 From: Tom Niget Date: Sat, 8 Jul 2023 02:15:16 +0200 Subject: [PATCH 022/361] Multiselect user filters (#2090) * Extract user filter generation code into method * Extend make_user_cond to allow lists of user IDs * Update documentation for stats APIs to indicate handling of ID lists * Use multiselect dropdown for user filter on graphs page Use standard concatenation Fix select style Move settings to JS constructor Change text for no users checked Don't call selectAll on page init Add it back Remove attributes Fix emptiness check Allow deselect all Only refresh if user id changed * Show "N users" starting at 2 users Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> * Use helper function split_strip Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> * Move make_user_cond at bottom and make private * Add new user picker to history page * Fix copy-paste error * Again * Add CSS for bootstrap-select --------- Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> --- .../default/css/bootstrap-select.min.css | 6 ++ data/interfaces/default/css/tautulli.css | 2 +- data/interfaces/default/graphs.html | 47 ++++++++--- data/interfaces/default/history.html | 42 ++++++++-- .../default/js/bootstrap-select.min.js | 9 
+++ plexpy/graphs.py | 79 ++++++------------- plexpy/webserve.py | 22 +++--- 7 files changed, 125 insertions(+), 82 deletions(-) create mode 100644 data/interfaces/default/css/bootstrap-select.min.css create mode 100644 data/interfaces/default/js/bootstrap-select.min.js diff --git a/data/interfaces/default/css/bootstrap-select.min.css b/data/interfaces/default/css/bootstrap-select.min.css new file mode 100644 index 00000000..d22faa63 --- /dev/null +++ b/data/interfaces/default/css/bootstrap-select.min.css @@ -0,0 +1,6 @@ +/*! + * Bootstrap-select v1.13.14 (https://developer.snapappointments.com/bootstrap-select) + * + * Copyright 2012-2020 SnapAppointments, LLC + * Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE) + */@-webkit-keyframes bs-notify-fadeOut{0%{opacity:.9}100%{opacity:0}}@-o-keyframes bs-notify-fadeOut{0%{opacity:.9}100%{opacity:0}}@keyframes bs-notify-fadeOut{0%{opacity:.9}100%{opacity:0}}.bootstrap-select>select.bs-select-hidden,select.bs-select-hidden,select.selectpicker{display:none!important}.bootstrap-select{width:220px\0;vertical-align:middle}.bootstrap-select>.dropdown-toggle{position:relative;width:100%;text-align:right;white-space:nowrap;display:-webkit-inline-box;display:-webkit-inline-flex;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}.bootstrap-select>.dropdown-toggle:after{margin-top:-1px}.bootstrap-select>.dropdown-toggle.bs-placeholder,.bootstrap-select>.dropdown-toggle.bs-placeholder:active,.bootstrap-select>.dropdown-toggle.bs-placeholder:focus,.bootstrap-select>.dropdown-toggle.bs-placeholder:hover{color:#999}.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:active,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:focus,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:hover,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:active,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:focus,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:hover,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:active,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:focus,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:hover,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:active,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:focus,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:hover,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:active,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:focus,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:hover,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:active,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:focus,.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:hover{color:rgba(255,255,255,.5)}.bootstrap-select>select{position:absolute!important;bottom:0;left:50%;display:block!important;width:.5px!important;height:100
%!important;padding:0!important;opacity:0!important;border:none;z-index:0!important}.bootstrap-select>select.mobile-device{top:0;left:0;display:block!important;width:100%!important;z-index:2!important}.bootstrap-select.is-invalid .dropdown-toggle,.error .bootstrap-select .dropdown-toggle,.has-error .bootstrap-select .dropdown-toggle,.was-validated .bootstrap-select select:invalid+.dropdown-toggle{border-color:#b94a48}.bootstrap-select.is-valid .dropdown-toggle,.was-validated .bootstrap-select select:valid+.dropdown-toggle{border-color:#28a745}.bootstrap-select.fit-width{width:auto!important}.bootstrap-select:not([class*=col-]):not([class*=form-control]):not(.input-group-btn){width:220px}.bootstrap-select .dropdown-toggle:focus,.bootstrap-select>select.mobile-device:focus+.dropdown-toggle{outline:thin dotted #333!important;outline:5px auto -webkit-focus-ring-color!important;outline-offset:-2px}.bootstrap-select.form-control{margin-bottom:0;padding:0;border:none;height:auto}:not(.input-group)>.bootstrap-select.form-control:not([class*=col-]){width:100%}.bootstrap-select.form-control.input-group-btn{float:none;z-index:auto}.form-inline .bootstrap-select,.form-inline .bootstrap-select.form-control:not([class*=col-]){width:auto}.bootstrap-select:not(.input-group-btn),.bootstrap-select[class*=col-]{float:none;display:inline-block;margin-left:0}.bootstrap-select.dropdown-menu-right,.bootstrap-select[class*=col-].dropdown-menu-right,.row .bootstrap-select[class*=col-].dropdown-menu-right{float:right}.form-group .bootstrap-select,.form-horizontal .bootstrap-select,.form-inline .bootstrap-select{margin-bottom:0}.form-group-lg .bootstrap-select.form-control,.form-group-sm .bootstrap-select.form-control{padding:0}.form-group-lg .bootstrap-select.form-control .dropdown-toggle,.form-group-sm .bootstrap-select.form-control .dropdown-toggle{height:100%;font-size:inherit;line-height:inherit;border-radius:inherit}.bootstrap-select.form-control-lg .dropdown-toggle,.bootstrap-select.form-control-sm .dropdown-toggle{font-size:inherit;line-height:inherit;border-radius:inherit}.bootstrap-select.form-control-sm .dropdown-toggle{padding:.25rem .5rem}.bootstrap-select.form-control-lg .dropdown-toggle{padding:.5rem 1rem}.form-inline .bootstrap-select .form-control{width:100%}.bootstrap-select.disabled,.bootstrap-select>.disabled{cursor:not-allowed}.bootstrap-select.disabled:focus,.bootstrap-select>.disabled:focus{outline:0!important}.bootstrap-select.bs-container{position:absolute;top:0;left:0;height:0!important;padding:0!important}.bootstrap-select.bs-container .dropdown-menu{z-index:1060}.bootstrap-select .dropdown-toggle .filter-option{position:static;top:0;left:0;float:left;height:100%;width:100%;text-align:left;overflow:hidden;-webkit-box-flex:0;-webkit-flex:0 1 auto;-ms-flex:0 1 auto;flex:0 1 auto}.bs3.bootstrap-select .dropdown-toggle .filter-option{padding-right:inherit}.input-group .bs3-has-addon.bootstrap-select .dropdown-toggle .filter-option{position:absolute;padding-top:inherit;padding-bottom:inherit;padding-left:inherit;float:none}.input-group .bs3-has-addon.bootstrap-select .dropdown-toggle .filter-option .filter-option-inner{padding-right:inherit}.bootstrap-select .dropdown-toggle .filter-option-inner-inner{overflow:hidden}.bootstrap-select .dropdown-toggle .filter-expand{width:0!important;float:left;opacity:0!important;overflow:hidden}.bootstrap-select .dropdown-toggle .caret{position:absolute;top:50%;right:12px;margin-top:-2px;vertical-align:middle}.input-group .bootstrap-select.form-control 
.dropdown-toggle{border-radius:inherit}.bootstrap-select[class*=col-] .dropdown-toggle{width:100%}.bootstrap-select .dropdown-menu{min-width:100%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.bootstrap-select .dropdown-menu>.inner:focus{outline:0!important}.bootstrap-select .dropdown-menu.inner{position:static;float:none;border:0;padding:0;margin:0;border-radius:0;-webkit-box-shadow:none;box-shadow:none}.bootstrap-select .dropdown-menu li{position:relative}.bootstrap-select .dropdown-menu li.active small{color:rgba(255,255,255,.5)!important}.bootstrap-select .dropdown-menu li.disabled a{cursor:not-allowed}.bootstrap-select .dropdown-menu li a{cursor:pointer;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.bootstrap-select .dropdown-menu li a.opt{position:relative;padding-left:2.25em}.bootstrap-select .dropdown-menu li a span.check-mark{display:none}.bootstrap-select .dropdown-menu li a span.text{display:inline-block}.bootstrap-select .dropdown-menu li small{padding-left:.5em}.bootstrap-select .dropdown-menu .notify{position:absolute;bottom:5px;width:96%;margin:0 2%;min-height:26px;padding:3px 5px;background:#f5f5f5;border:1px solid #e3e3e3;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05);pointer-events:none;opacity:.9;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.bootstrap-select .dropdown-menu .notify.fadeOut{-webkit-animation:.3s linear 750ms forwards bs-notify-fadeOut;-o-animation:.3s linear 750ms forwards bs-notify-fadeOut;animation:.3s linear 750ms forwards bs-notify-fadeOut}.bootstrap-select .no-results{padding:3px;background:#f5f5f5;margin:0 5px;white-space:nowrap}.bootstrap-select.fit-width .dropdown-toggle .filter-option{position:static;display:inline;padding:0}.bootstrap-select.fit-width .dropdown-toggle .filter-option-inner,.bootstrap-select.fit-width .dropdown-toggle .filter-option-inner-inner{display:inline}.bootstrap-select.fit-width .dropdown-toggle .bs-caret:before{content:'\00a0'}.bootstrap-select.fit-width .dropdown-toggle .caret{position:static;top:auto;margin-top:-1px}.bootstrap-select.show-tick .dropdown-menu .selected span.check-mark{position:absolute;display:inline-block;right:15px;top:5px}.bootstrap-select.show-tick .dropdown-menu li a span.text{margin-right:34px}.bootstrap-select .bs-ok-default:after{content:'';display:block;width:.5em;height:1em;border-style:solid;border-width:0 .26em .26em 0;-webkit-transform:rotate(45deg);-ms-transform:rotate(45deg);-o-transform:rotate(45deg);transform:rotate(45deg)}.bootstrap-select.show-menu-arrow.open>.dropdown-toggle,.bootstrap-select.show-menu-arrow.show>.dropdown-toggle{z-index:1061}.bootstrap-select.show-menu-arrow .dropdown-toggle .filter-option:before{content:'';border-left:7px solid transparent;border-right:7px solid transparent;border-bottom:7px solid rgba(204,204,204,.2);position:absolute;bottom:-4px;left:9px;display:none}.bootstrap-select.show-menu-arrow .dropdown-toggle .filter-option:after{content:'';border-left:6px solid transparent;border-right:6px solid transparent;border-bottom:6px solid #fff;position:absolute;bottom:-4px;left:10px;display:none}.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle .filter-option:before{bottom:auto;top:-4px;border-top:7px solid rgba(204,204,204,.2);border-bottom:0}.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle .filter-option:after{bottom:auto;top:-4px;border-top:6px solid 
#fff;border-bottom:0}.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle .filter-option:before{right:12px;left:auto}.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle .filter-option:after{right:13px;left:auto}.bootstrap-select.show-menu-arrow.open>.dropdown-toggle .filter-option:after,.bootstrap-select.show-menu-arrow.open>.dropdown-toggle .filter-option:before,.bootstrap-select.show-menu-arrow.show>.dropdown-toggle .filter-option:after,.bootstrap-select.show-menu-arrow.show>.dropdown-toggle .filter-option:before{display:block}.bs-actionsbox,.bs-donebutton,.bs-searchbox{padding:4px 8px}.bs-actionsbox{width:100%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.bs-actionsbox .btn-group button{width:50%}.bs-donebutton{float:left;width:100%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.bs-donebutton .btn-group button{width:100%}.bs-searchbox+.bs-actionsbox{padding:0 8px 4px}.bs-searchbox .form-control{margin-bottom:0;width:100%;float:none} \ No newline at end of file diff --git a/data/interfaces/default/css/tautulli.css b/data/interfaces/default/css/tautulli.css index 5f1d90a0..e256d2d7 100644 --- a/data/interfaces/default/css/tautulli.css +++ b/data/interfaces/default/css/tautulli.css @@ -2914,7 +2914,7 @@ a .home-platforms-list-cover-face:hover margin-bottom: -20px; width: 100%; max-width: 1750px; - overflow: hidden; + display: flow-root; } .table-card-back td { font-size: 12px; diff --git a/data/interfaces/default/graphs.html b/data/interfaces/default/graphs.html index 3f189112..8435df20 100644 --- a/data/interfaces/default/graphs.html +++ b/data/interfaces/default/graphs.html @@ -1,6 +1,7 @@ <%inherit file="base.html"/> <%def name="headIncludes()"> + @@ -14,9 +15,7 @@

@@ -225,6 +224,7 @@ <%def name="javascriptIncludes()"> + @@ -373,14 +373,35 @@ type: 'get', dataType: "json", success: function (data) { - var select = $('#graph-user'); + let select = $('#graph-user'); + let by_id = {}; data.sort(function(a, b) { return a.friendly_name.localeCompare(b.friendly_name); }); data.forEach(function(item) { select.append(''); + by_id[item.user_id] = item.friendly_name; }); + select.selectpicker({ + countSelectedText: function(sel, total) { + if (sel === 0 || sel === total) { + return 'All users'; + } else if (sel > 1) { + return sel + ' users'; + } else { + return select.val().map(function(id) { + return by_id[id]; + }).join(', '); + } + }, + style: 'btn-dark', + actionsBox: true, + selectedTextFormat: 'count', + noneSelectedText: 'All users' + }); + select.selectpicker('render'); + select.selectpicker('selectAll'); } }); @@ -602,11 +623,6 @@ $('#nav-tabs-total').tab('show'); } - // Set initial state - if (current_tab === '#tabs-plays') { loadGraphsTab1(current_day_range, yaxis); } - if (current_tab === '#tabs-stream') { loadGraphsTab2(current_day_range, yaxis); } - if (current_tab === '#tabs-total') { loadGraphsTab3(current_month_range, yaxis); } - // Tab1 opened $('#nav-tabs-plays').on('shown.bs.tab', function (e) { e.preventDefault(); @@ -652,9 +668,20 @@ $('.months').text(current_month_range); }); + let graph_user_last_id = undefined; + // User changed $('#graph-user').on('change', function() { - selected_user_id = $(this).val() || null; + let val = $(this).val(); + if (val.length === 0 || val.length === $(this).children().length) { + selected_user_id = null; // if all users are selected, just send an empty list + } else { + selected_user_id = val.join(","); + } + if (selected_user_id === graph_user_last_id) { + return; + } + graph_user_last_id = selected_user_id; if (current_tab === '#tabs-plays') { loadGraphsTab1(current_day_range, yaxis); } if (current_tab === '#tabs-stream') { loadGraphsTab2(current_day_range, yaxis); } if (current_tab === '#tabs-total') { loadGraphsTab3(current_month_range, yaxis); } diff --git a/data/interfaces/default/history.html b/data/interfaces/default/history.html index 327b99b7..8ab8b19e 100644 --- a/data/interfaces/default/history.html +++ b/data/interfaces/default/history.html @@ -1,6 +1,7 @@ <%inherit file="base.html"/> <%def name="headIncludes()"> + @@ -31,9 +32,7 @@ % if _session['user_group'] == 'admin':
@@ -121,6 +120,7 @@ <%def name="javascriptIncludes()"> + @@ -134,17 +134,40 @@ type: 'GET', dataType: 'json', success: function (data) { - var select = $('#history-user'); + let select = $('#history-user'); + let by_id = {}; data.sort(function (a, b) { return a.friendly_name.localeCompare(b.friendly_name); }); data.forEach(function (item) { select.append(''); + by_id[item.user_id] = item.friendly_name; }); + select.selectpicker({ + countSelectedText: function(sel, total) { + if (sel === 0 || sel === total) { + return 'All users'; + } else if (sel > 1) { + return sel + ' users'; + } else { + return select.val().map(function(id) { + return by_id[id]; + }).join(', '); + } + }, + style: 'btn-dark', + actionsBox: true, + selectedTextFormat: 'count', + noneSelectedText: 'All users' + }); + select.selectpicker('render'); + select.selectpicker('selectAll'); } }); + let history_user_last_id = undefined; + function loadHistoryTable(media_type, transcode_decision, selected_user_id) { history_table_options.ajax = { url: 'get_history', @@ -187,7 +210,16 @@ }); $('#history-user').on('change', function () { - selected_user_id = $(this).val() || null; + let val = $(this).val(); + if (val.length === 0 || val.length === $(this).children().length) { + selected_user_id = null; // if all users are selected, just send an empty list + } else { + selected_user_id = val.join(","); + } + if (selected_user_id === history_user_last_id) { + return; + } + history_user_last_id = selected_user_id; history_table.draw(); }); } diff --git a/data/interfaces/default/js/bootstrap-select.min.js b/data/interfaces/default/js/bootstrap-select.min.js new file mode 100644 index 00000000..92e3a32e --- /dev/null +++ b/data/interfaces/default/js/bootstrap-select.min.js @@ -0,0 +1,9 @@ +/*! + * Bootstrap-select v1.13.14 (https://developer.snapappointments.com/bootstrap-select) + * + * Copyright 2012-2020 SnapAppointments, LLC + * Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE) + */ + +!function(e,t){void 0===e&&void 0!==window&&(e=window),"function"==typeof define&&define.amd?define(["jquery"],function(e){return t(e)}):"object"==typeof module&&module.exports?module.exports=t(require("jquery")):t(e.jQuery)}(this,function(e){!function(z){"use strict";var d=["sanitize","whiteList","sanitizeFn"],r=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],e={"*":["class","dir","id","lang","role","tabindex","style",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},l=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,a=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;function v(e,t){var i=e.nodeName.toLowerCase();if(-1!==z.inArray(i,t))return-1===z.inArray(i,r)||Boolean(e.nodeValue.match(l)||e.nodeValue.match(a));for(var s=z(t).filter(function(e,t){return t instanceof RegExp}),n=0,o=s.length;n]+>/g,"")),s&&(a=w(a)),a=a.toUpperCase(),o="contains"===i?0<=a.indexOf(t):a.startsWith(t)))break}return o}function L(e){return parseInt(e,10)||0}z.fn.triggerNative=function(e){var t,i=this[0];i.dispatchEvent?(u?t=new 
Event(e,{bubbles:!0}):(t=document.createEvent("Event")).initEvent(e,!0,!1),i.dispatchEvent(t)):i.fireEvent?((t=document.createEventObject()).eventType=e,i.fireEvent("on"+e,t)):this.trigger(e)};var f={"\xc0":"A","\xc1":"A","\xc2":"A","\xc3":"A","\xc4":"A","\xc5":"A","\xe0":"a","\xe1":"a","\xe2":"a","\xe3":"a","\xe4":"a","\xe5":"a","\xc7":"C","\xe7":"c","\xd0":"D","\xf0":"d","\xc8":"E","\xc9":"E","\xca":"E","\xcb":"E","\xe8":"e","\xe9":"e","\xea":"e","\xeb":"e","\xcc":"I","\xcd":"I","\xce":"I","\xcf":"I","\xec":"i","\xed":"i","\xee":"i","\xef":"i","\xd1":"N","\xf1":"n","\xd2":"O","\xd3":"O","\xd4":"O","\xd5":"O","\xd6":"O","\xd8":"O","\xf2":"o","\xf3":"o","\xf4":"o","\xf5":"o","\xf6":"o","\xf8":"o","\xd9":"U","\xda":"U","\xdb":"U","\xdc":"U","\xf9":"u","\xfa":"u","\xfb":"u","\xfc":"u","\xdd":"Y","\xfd":"y","\xff":"y","\xc6":"Ae","\xe6":"ae","\xde":"Th","\xfe":"th","\xdf":"ss","\u0100":"A","\u0102":"A","\u0104":"A","\u0101":"a","\u0103":"a","\u0105":"a","\u0106":"C","\u0108":"C","\u010a":"C","\u010c":"C","\u0107":"c","\u0109":"c","\u010b":"c","\u010d":"c","\u010e":"D","\u0110":"D","\u010f":"d","\u0111":"d","\u0112":"E","\u0114":"E","\u0116":"E","\u0118":"E","\u011a":"E","\u0113":"e","\u0115":"e","\u0117":"e","\u0119":"e","\u011b":"e","\u011c":"G","\u011e":"G","\u0120":"G","\u0122":"G","\u011d":"g","\u011f":"g","\u0121":"g","\u0123":"g","\u0124":"H","\u0126":"H","\u0125":"h","\u0127":"h","\u0128":"I","\u012a":"I","\u012c":"I","\u012e":"I","\u0130":"I","\u0129":"i","\u012b":"i","\u012d":"i","\u012f":"i","\u0131":"i","\u0134":"J","\u0135":"j","\u0136":"K","\u0137":"k","\u0138":"k","\u0139":"L","\u013b":"L","\u013d":"L","\u013f":"L","\u0141":"L","\u013a":"l","\u013c":"l","\u013e":"l","\u0140":"l","\u0142":"l","\u0143":"N","\u0145":"N","\u0147":"N","\u014a":"N","\u0144":"n","\u0146":"n","\u0148":"n","\u014b":"n","\u014c":"O","\u014e":"O","\u0150":"O","\u014d":"o","\u014f":"o","\u0151":"o","\u0154":"R","\u0156":"R","\u0158":"R","\u0155":"r","\u0157":"r","\u0159":"r","\u015a":"S","\u015c":"S","\u015e":"S","\u0160":"S","\u015b":"s","\u015d":"s","\u015f":"s","\u0161":"s","\u0162":"T","\u0164":"T","\u0166":"T","\u0163":"t","\u0165":"t","\u0167":"t","\u0168":"U","\u016a":"U","\u016c":"U","\u016e":"U","\u0170":"U","\u0172":"U","\u0169":"u","\u016b":"u","\u016d":"u","\u016f":"u","\u0171":"u","\u0173":"u","\u0174":"W","\u0175":"w","\u0176":"Y","\u0177":"y","\u0178":"Y","\u0179":"Z","\u017b":"Z","\u017d":"Z","\u017a":"z","\u017c":"z","\u017e":"z","\u0132":"IJ","\u0133":"ij","\u0152":"Oe","\u0153":"oe","\u0149":"'n","\u017f":"s"},m=/[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g,g=RegExp("[\\u0300-\\u036f\\ufe20-\\ufe2f\\u20d0-\\u20ff\\u1ab0-\\u1aff\\u1dc0-\\u1dff]","g");function b(e){return f[e]}function w(e){return(e=e.toString())&&e.replace(m,b).replace(g,"")}var I,x,y,$,S=(I={"&":"&","<":"<",">":">",'"':""","'":"'","`":"`"},x="(?:"+Object.keys(I).join("|")+")",y=RegExp(x),$=RegExp(x,"g"),function(e){return e=null==e?"":""+e,y.test(e)?e.replace($,E):e});function E(e){return I[e]}var C={32:" ",48:"0",49:"1",50:"2",51:"3",52:"4",53:"5",54:"6",55:"7",56:"8",57:"9",59:";",65:"A",66:"B",67:"C",68:"D",69:"E",70:"F",71:"G",72:"H",73:"I",74:"J",75:"K",76:"L",77:"M",78:"N",79:"O",80:"P",81:"Q",82:"R",83:"S",84:"T",85:"U",86:"V",87:"W",88:"X",89:"Y",90:"Z",96:"0",97:"1",98:"2",99:"3",100:"4",101:"5",102:"6",103:"7",104:"8",105:"9"},N=27,D=13,H=32,W=9,B=38,M=40,R={success:!1,major:"3"};try{R.full=(z.fn.dropdown.Constructor.VERSION||"").split(" ")[0].split("."),R.major=R.full[0],R.success=!0}catch(e){}var 
U=0,j=".bs.select",V={DISABLED:"disabled",DIVIDER:"divider",SHOW:"open",DROPUP:"dropup",MENU:"dropdown-menu",MENURIGHT:"dropdown-menu-right",MENULEFT:"dropdown-menu-left",BUTTONCLASS:"btn-default",POPOVERHEADER:"popover-title",ICONBASE:"glyphicon",TICKICON:"glyphicon-ok"},F={MENU:"."+V.MENU},_={span:document.createElement("span"),i:document.createElement("i"),subtext:document.createElement("small"),a:document.createElement("a"),li:document.createElement("li"),whitespace:document.createTextNode("\xa0"),fragment:document.createDocumentFragment()};_.a.setAttribute("role","option"),"4"===R.major&&(_.a.className="dropdown-item"),_.subtext.className="text-muted",_.text=_.span.cloneNode(!1),_.text.className="text",_.checkMark=_.span.cloneNode(!1);var G=new RegExp(B+"|"+M),q=new RegExp("^"+W+"$|"+N),K={li:function(e,t,i){var s=_.li.cloneNode(!1);return e&&(1===e.nodeType||11===e.nodeType?s.appendChild(e):s.innerHTML=e),void 0!==t&&""!==t&&(s.className=t),null!=i&&s.classList.add("optgroup-"+i),s},a:function(e,t,i){var s=_.a.cloneNode(!0);return e&&(11===e.nodeType?s.appendChild(e):s.insertAdjacentHTML("beforeend",e)),void 0!==t&&""!==t&&s.classList.add.apply(s.classList,t.split(" ")),i&&s.setAttribute("style",i),s},text:function(e,t){var i,s,n=_.text.cloneNode(!1);if(e.content)n.innerHTML=e.content;else{if(n.textContent=e.text,e.icon){var o=_.whitespace.cloneNode(!1);(s=(!0===t?_.i:_.span).cloneNode(!1)).className=this.options.iconBase+" "+e.icon,_.fragment.appendChild(s),_.fragment.appendChild(o)}e.subtext&&((i=_.subtext.cloneNode(!1)).textContent=e.subtext,n.appendChild(i))}if(!0===t)for(;0'},maxOptions:!1,mobile:!1,selectOnTab:!1,dropdownAlignRight:!1,windowPadding:0,virtualScroll:600,display:!1,sanitize:!0,sanitizeFn:null,whiteList:e},Y.prototype={constructor:Y,init:function(){var i=this,e=this.$element.attr("id");U++,this.selectId="bs-select-"+U,this.$element[0].classList.add("bs-select-hidden"),this.multiple=this.$element.prop("multiple"),this.autofocus=this.$element.prop("autofocus"),this.$element[0].classList.contains("show-tick")&&(this.options.showTick=!0),this.$newElement=this.createDropdown(),this.buildData(),this.$element.after(this.$newElement).prependTo(this.$newElement),this.$button=this.$newElement.children("button"),this.$menu=this.$newElement.children(F.MENU),this.$menuInner=this.$menu.children(".inner"),this.$searchbox=this.$menu.find("input"),this.$element[0].classList.remove("bs-select-hidden"),!0===this.options.dropdownAlignRight&&this.$menu[0].classList.add(V.MENURIGHT),void 0!==e&&this.$button.attr("data-id",e),this.checkDisabled(),this.clickListener(),this.options.liveSearch?(this.liveSearchListener(),this.focusedParent=this.$searchbox[0]):this.focusedParent=this.$menuInner[0],this.setStyle(),this.render(),this.setWidth(),this.options.container?this.selectPosition():this.$element.on("hide"+j,function(){if(i.isVirtual()){var 
e=i.$menuInner[0],t=e.firstChild.cloneNode(!1);e.replaceChild(t,e.firstChild),e.scrollTop=0}}),this.$menu.data("this",this),this.$newElement.data("this",this),this.options.mobile&&this.mobile(),this.$newElement.on({"hide.bs.dropdown":function(e){i.$element.trigger("hide"+j,e)},"hidden.bs.dropdown":function(e){i.$element.trigger("hidden"+j,e)},"show.bs.dropdown":function(e){i.$element.trigger("show"+j,e)},"shown.bs.dropdown":function(e){i.$element.trigger("shown"+j,e)}}),i.$element[0].hasAttribute("required")&&this.$element.on("invalid"+j,function(){i.$button[0].classList.add("bs-invalid"),i.$element.on("shown"+j+".invalid",function(){i.$element.val(i.$element.val()).off("shown"+j+".invalid")}).on("rendered"+j,function(){this.validity.valid&&i.$button[0].classList.remove("bs-invalid"),i.$element.off("rendered"+j)}),i.$button.on("blur"+j,function(){i.$element.trigger("focus").trigger("blur"),i.$button.off("blur"+j)})}),setTimeout(function(){i.buildList(),i.$element.trigger("loaded"+j)})},createDropdown:function(){var e=this.multiple||this.options.showTick?" show-tick":"",t=this.multiple?' aria-multiselectable="true"':"",i="",s=this.autofocus?" autofocus":"";R.major<4&&this.$element.parent().hasClass("input-group")&&(i=" input-group-btn");var n,o="",r="",l="",a="";return this.options.header&&(o='
'+this.options.header+"
"),this.options.liveSearch&&(r=''),this.multiple&&this.options.actionsBox&&(l='
"),this.multiple&&this.options.doneButton&&(a='
"),n='",z(n)},setPositionData:function(){this.selectpicker.view.canHighlight=[];for(var e=this.selectpicker.view.size=0;e=this.options.virtualScroll||!0===this.options.virtualScroll},createView:function(A,e,t){var L,N,D=this,i=0,H=[];if(this.selectpicker.isSearching=A,this.selectpicker.current=A?this.selectpicker.search:this.selectpicker.main,this.setPositionData(),e)if(t)i=this.$menuInner[0].scrollTop;else if(!D.multiple){var s=D.$element[0],n=(s.options[s.selectedIndex]||{}).liIndex;if("number"==typeof n&&!1!==D.options.size){var o=D.selectpicker.main.data[n],r=o&&o.position;r&&(i=r-(D.sizeInfo.menuInnerHeight+D.sizeInfo.liHeight)/2)}}function l(e,t){var i,s,n,o,r,l,a,c,d=D.selectpicker.current.elements.length,h=[],p=!0,u=D.isVirtual();D.selectpicker.view.scrollTop=e,i=Math.ceil(D.sizeInfo.menuInnerHeight/D.sizeInfo.liHeight*1.5),s=Math.round(d/i)||1;for(var f=0;fd-1?0:D.selectpicker.current.data[d-1].position-D.selectpicker.current.data[D.selectpicker.view.position1-1].position,b.firstChild.style.marginTop=v+"px",b.firstChild.style.marginBottom=g+"px"):(b.firstChild.style.marginTop=0,b.firstChild.style.marginBottom=0),b.firstChild.appendChild(w),!0===u&&D.sizeInfo.hasScrollBar){var C=b.firstChild.offsetWidth;if(t&&CD.sizeInfo.selectWidth)b.firstChild.style.minWidth=D.sizeInfo.menuInnerInnerWidth+"px";else if(C>D.sizeInfo.menuInnerInnerWidth){D.$menu[0].style.minWidth=0;var O=b.firstChild.offsetWidth;O>D.sizeInfo.menuInnerInnerWidth&&(D.sizeInfo.menuInnerInnerWidth=O,b.firstChild.style.minWidth=D.sizeInfo.menuInnerInnerWidth+"px"),D.$menu[0].style.minWidth=""}}}if(D.prevActiveIndex=D.activeIndex,D.options.liveSearch){if(A&&t){var z,T=0;D.selectpicker.view.canHighlight[T]||(T=1+D.selectpicker.view.canHighlight.slice(1).indexOf(!0)),z=D.selectpicker.view.visibleElements[T],D.defocusItem(D.selectpicker.view.currentActive),D.activeIndex=(D.selectpicker.current.data[T]||{}).index,D.focusItem(z)}}else D.$menuInner.trigger("focus")}l(i,!0),this.$menuInner.off("scroll.createView").on("scroll.createView",function(e,t){D.noScroll||l(this.scrollTop,t),D.noScroll=!1}),z(window).off("resize"+j+"."+this.selectId+".createView").on("resize"+j+"."+this.selectId+".createView",function(){D.$newElement.hasClass(V.SHOW)&&l(D.$menuInner[0].scrollTop)})},focusItem:function(e,t,i){if(e){t=t||this.selectpicker.main.data[this.activeIndex];var s=e.firstChild;s&&(s.setAttribute("aria-setsize",this.selectpicker.view.size),s.setAttribute("aria-posinset",t.posinset),!0!==i&&(this.focusedParent.setAttribute("aria-activedescendant",s.id),e.classList.add("active"),s.classList.add("active")))}},defocusItem:function(e){e&&(e.classList.remove("active"),e.firstChild&&e.firstChild.classList.remove("active"))},setPlaceholder:function(){var e=!1;if(this.options.title&&!this.multiple){this.selectpicker.view.titleOption||(this.selectpicker.view.titleOption=document.createElement("option")),e=!0;var t=this.$element[0],i=!1,s=!this.selectpicker.view.titleOption.parentNode;if(s)this.selectpicker.view.titleOption.className="bs-title-option",this.selectpicker.view.titleOption.value="",i=void 0===z(t.options[t.selectedIndex]).attr("selected")&&void 0===this.$element.data("selected");!s&&0===this.selectpicker.view.titleOption.index||t.insertBefore(this.selectpicker.view.titleOption,t.firstChild),i&&(t.selectedIndex=0)}return e},buildData:function(){var p=':not([hidden]):not([data-hidden="true"])',u=[],f=0,e=this.setPlaceholder()?1:0;this.options.hideDisabled&&(p+=":not(:disabled)");var t=this.$element[0].querySelectorAll("select > 
*"+p);function m(e){var t=u[u.length-1];t&&"divider"===t.type&&(t.optID||e.optID)||((e=e||{}).type="divider",u.push(e))}function v(e,t){if((t=t||{}).divider="true"===e.getAttribute("data-divider"),t.divider)m({optID:t.optID});else{var i=u.length,s=e.style.cssText,n=s?S(s):"",o=(e.className||"")+(t.optgroupClass||"");t.optID&&(o="opt "+o),t.optionClass=o.trim(),t.inlineStyle=n,t.text=e.textContent,t.content=e.getAttribute("data-content"),t.tokens=e.getAttribute("data-tokens"),t.subtext=e.getAttribute("data-subtext"),t.icon=e.getAttribute("data-icon"),e.liIndex=i,t.display=t.content||t.text,t.type="option",t.index=i,t.option=e,t.selected=!!e.selected,t.disabled=t.disabled||!!e.disabled,u.push(t)}}function i(e,t){var i=t[e],s=t[e-1],n=t[e+1],o=i.querySelectorAll("option"+p);if(o.length){var r,l,a={display:S(i.label),subtext:i.getAttribute("data-subtext"),icon:i.getAttribute("data-icon"),type:"optgroup-label",optgroupClass:" "+(i.className||"")};f++,s&&m({optID:f}),a.optID=f,u.push(a);for(var c=0,d=o.length;c li")},render:function(){var e,t=this,i=this.$element[0],s=this.setPlaceholder()&&0===i.selectedIndex,n=O(i,this.options.hideDisabled),o=n.length,r=this.$button[0],l=r.querySelector(".filter-option-inner-inner"),a=document.createTextNode(this.options.multipleSeparator),c=_.fragment.cloneNode(!1),d=!1;if(r.classList.toggle("bs-placeholder",t.multiple?!o:!T(i,n)),this.tabIndex(),"static"===this.options.selectedTextFormat)c=K.text.call(this,{text:this.options.title},!0);else if(!1===(this.multiple&&-1!==this.options.selectedTextFormat.indexOf("count")&&1")).length&&o>e[1]||1===e.length&&2<=o))){if(!s){for(var h=0;h option"+m+", optgroup"+m+" option"+m).length,g="function"==typeof this.options.countSelectedText?this.options.countSelectedText(o,v):this.options.countSelectedText;c=K.text.call(this,{text:g.replace("{0}",o.toString()).replace("{1}",v.toString())},!0)}if(null==this.options.title&&(this.options.title=this.$element.attr("title")),c.childNodes.length||(c=K.text.call(this,{text:void 0!==this.options.title?this.options.title:this.options.noneSelectedText},!0)),r.title=c.textContent.replace(/<[^>]*>?/g,"").trim(),this.options.sanitize&&d&&P([c],t.options.whiteList,t.options.sanitizeFn),l.innerHTML="",l.appendChild(c),R.major<4&&this.$newElement[0].classList.contains("bs3-has-addon")){var b=r.querySelector(".filter-expand"),w=l.cloneNode(!0);w.className="filter-expand",b?r.replaceChild(w,b):r.appendChild(w)}this.$element.trigger("rendered"+j)},setStyle:function(e,t){var i,s=this.$button[0],n=this.$newElement[0],o=this.options.style.trim();this.$element.attr("class")&&this.$newElement.addClass(this.$element.attr("class").replace(/selectpicker|mobile-device|bs-select-hidden|validate\[.*\]/gi,"")),R.major<4&&(n.classList.add("bs3"),n.parentNode.classList.contains("input-group")&&(n.previousElementSibling||n.nextElementSibling)&&(n.previousElementSibling||n.nextElementSibling).classList.contains("input-group-addon")&&n.classList.add("bs3-has-addon")),i=e?e.trim():o,"add"==t?i&&s.classList.add.apply(s.classList,i.split(" ")):"remove"==t?i&&s.classList.remove.apply(s.classList,i.split(" ")):(o&&s.classList.remove.apply(s.classList,o.split(" ")),i&&s.classList.add.apply(s.classList,i.split(" ")))},liHeight:function(e){if(e||!1!==this.options.size&&!Object.keys(this.sizeInfo).length){var 
t=document.createElement("div"),i=document.createElement("div"),s=document.createElement("div"),n=document.createElement("ul"),o=document.createElement("li"),r=document.createElement("li"),l=document.createElement("li"),a=document.createElement("a"),c=document.createElement("span"),d=this.options.header&&0this.sizeInfo.menuExtras.vert&&l+this.sizeInfo.menuExtras.vert+50>this.sizeInfo.selectOffsetBot,!0===this.selectpicker.isSearching&&(a=this.selectpicker.dropup),this.$newElement.toggleClass(V.DROPUP,a),this.selectpicker.dropup=a),"auto"===this.options.size)n=3this.options.size){for(var b=0;bthis.sizeInfo.menuInnerHeight&&(this.sizeInfo.hasScrollBar=!0,this.sizeInfo.totalMenuWidth=this.sizeInfo.menuWidth+this.sizeInfo.scrollBarWidth),"auto"===this.options.dropdownAlignRight&&this.$menu.toggleClass(V.MENURIGHT,this.sizeInfo.selectOffsetLeft>this.sizeInfo.selectOffsetRight&&this.sizeInfo.selectOffsetRightthis.options.size&&i.off("resize"+j+"."+this.selectId+".setMenuSize scroll"+j+"."+this.selectId+".setMenuSize")}this.createView(!1,!0,e)},setWidth:function(){var i=this;"auto"===this.options.width?requestAnimationFrame(function(){i.$menu.css("min-width","0"),i.$element.on("loaded"+j,function(){i.liHeight(),i.setMenuSize();var e=i.$newElement.clone().appendTo("body"),t=e.css("width","auto").children("button").outerWidth();e.remove(),i.sizeInfo.selectWidth=Math.max(i.sizeInfo.totalMenuWidth,t),i.$newElement.css("width",i.sizeInfo.selectWidth+"px")})}):"fit"===this.options.width?(this.$menu.css("min-width",""),this.$newElement.css("width","").addClass("fit-width")):this.options.width?(this.$menu.css("min-width",""),this.$newElement.css("width",this.options.width)):(this.$menu.css("min-width",""),this.$newElement.css("width","")),this.$newElement.hasClass("fit-width")&&"fit"!==this.options.width&&this.$newElement[0].classList.remove("fit-width")},selectPosition:function(){this.$bsContainer=z('
');function e(e){var t={},i=r.options.display||!!z.fn.dropdown.Constructor.Default&&z.fn.dropdown.Constructor.Default.display;r.$bsContainer.addClass(e.attr("class").replace(/form-control|fit-width/gi,"")).toggleClass(V.DROPUP,e.hasClass(V.DROPUP)),s=e.offset(),l.is("body")?n={top:0,left:0}:((n=l.offset()).top+=parseInt(l.css("borderTopWidth"))-l.scrollTop(),n.left+=parseInt(l.css("borderLeftWidth"))-l.scrollLeft()),o=e.hasClass(V.DROPUP)?0:e[0].offsetHeight,(R.major<4||"static"===i)&&(t.top=s.top-n.top+o,t.left=s.left-n.left),t.width=e[0].offsetWidth,r.$bsContainer.css(t)}var s,n,o,r=this,l=z(this.options.container);this.$button.on("click.bs.dropdown.data-api",function(){r.isDisabled()||(e(r.$newElement),r.$bsContainer.appendTo(r.options.container).toggleClass(V.SHOW,!r.$button.hasClass(V.SHOW)).append(r.$menu))}),z(window).off("resize"+j+"."+this.selectId+" scroll"+j+"."+this.selectId).on("resize"+j+"."+this.selectId+" scroll"+j+"."+this.selectId,function(){r.$newElement.hasClass(V.SHOW)&&e(r.$newElement)}),this.$element.on("hide"+j,function(){r.$menu.data("height",r.$menu.height()),r.$bsContainer.detach()})},setOptionStatus:function(e){var t=this;if(t.noScroll=!1,t.selectpicker.view.visibleElements&&t.selectpicker.view.visibleElements.length)for(var i=0;i
');y[2]&&($=$.replace("{var}",y[2][1"+$+"
")),d=!1,C.$element.trigger("maxReached"+j)),g&&w&&(E.append(z("
"+S+"
")),d=!1,C.$element.trigger("maxReachedGrp"+j)),setTimeout(function(){C.setSelected(r,!1)},10),E[0].classList.add("fadeOut"),setTimeout(function(){E.remove()},1050)}}}else c&&(c.selected=!1),h.selected=!0,C.setSelected(r,!0);!C.multiple||C.multiple&&1===C.options.maxOptions?C.$button.trigger("focus"):C.options.liveSearch&&C.$searchbox.trigger("focus"),d&&(!C.multiple&&a===s.selectedIndex||(A=[h.index,p.prop("selected"),l],C.$element.triggerNative("change")))}}),this.$menu.on("click","li."+V.DISABLED+" a, ."+V.POPOVERHEADER+", ."+V.POPOVERHEADER+" :not(.close)",function(e){e.currentTarget==this&&(e.preventDefault(),e.stopPropagation(),C.options.liveSearch&&!z(e.target).hasClass("close")?C.$searchbox.trigger("focus"):C.$button.trigger("focus"))}),this.$menuInner.on("click",".divider, .dropdown-header",function(e){e.preventDefault(),e.stopPropagation(),C.options.liveSearch?C.$searchbox.trigger("focus"):C.$button.trigger("focus")}),this.$menu.on("click","."+V.POPOVERHEADER+" .close",function(){C.$button.trigger("click")}),this.$searchbox.on("click",function(e){e.stopPropagation()}),this.$menu.on("click",".actions-btn",function(e){C.options.liveSearch?C.$searchbox.trigger("focus"):C.$button.trigger("focus"),e.preventDefault(),e.stopPropagation(),z(this).hasClass("bs-select-all")?C.selectAll():C.deselectAll()}),this.$element.on("change"+j,function(){C.render(),C.$element.trigger("changed"+j,A),A=null}).on("focus"+j,function(){C.options.mobile||C.$button.trigger("focus")})},liveSearchListener:function(){var u=this,f=document.createElement("li");this.$button.on("click.bs.dropdown.data-api",function(){u.$searchbox.val()&&u.$searchbox.val("")}),this.$searchbox.on("click.bs.dropdown.data-api focus.bs.dropdown.data-api touchend.bs.dropdown.data-api",function(e){e.stopPropagation()}),this.$searchbox.on("input propertychange",function(){var e=u.$searchbox.val();if(u.selectpicker.search.elements=[],u.selectpicker.search.data=[],e){var t=[],i=e.toUpperCase(),s={},n=[],o=u._searchStyle(),r=u.options.liveSearchNormalize;r&&(i=w(i));for(var l=0;l=a.selectpicker.view.canHighlight.length&&(t=0),a.selectpicker.view.canHighlight[t+f]||(t=t+1+a.selectpicker.view.canHighlight.slice(t+f+1).indexOf(!0))),e.preventDefault();var m=f+t;e.which===B?0===f&&t===c.length-1?(a.$menuInner[0].scrollTop=a.$menuInner[0].scrollHeight,m=a.selectpicker.current.elements.length-1):d=(o=(n=a.selectpicker.current.data[m]).position-n.height)u+a.sizeInfo.menuInnerHeight),s=a.selectpicker.main.elements[v],a.activeIndex=b[x],a.focusItem(s),s&&s.firstChild.focus(),d&&(a.$menuInner[0].scrollTop=o),r.trigger("focus")}}i&&(e.which===H&&!a.selectpicker.keydown.keyHistory||e.which===D||e.which===W&&a.options.selectOnTab)&&(e.which!==H&&e.preventDefault(),a.options.liveSearch&&e.which===H||(a.$menuInner.find(".active a").trigger("click",!0),r.trigger("focus"),a.options.liveSearch||(e.preventDefault(),z(document).data("spaceSelect",!0))))}},mobile:function(){this.$element[0].classList.add("mobile-device")},refresh:function(){var 
e=z.extend({},this.options,this.$element.data());this.options=e,this.checkDisabled(),this.setStyle(),this.render(),this.buildData(),this.buildList(),this.setWidth(),this.setSize(!0),this.$element.trigger("refreshed"+j)},hide:function(){this.$newElement.hide()},show:function(){this.$newElement.show()},remove:function(){this.$newElement.remove(),this.$element.remove()},destroy:function(){this.$newElement.before(this.$element).remove(),this.$bsContainer?this.$bsContainer.remove():this.$menu.remove(),this.$element.off(j).removeData("selectpicker").removeClass("bs-select-hidden selectpicker"),z(window).off(j+"."+this.selectId)}};var J=z.fn.selectpicker;z.fn.selectpicker=Z,z.fn.selectpicker.Constructor=Y,z.fn.selectpicker.noConflict=function(){return z.fn.selectpicker=J,this};var Q=z.fn.dropdown.Constructor._dataApiKeydownHandler||z.fn.dropdown.Constructor.prototype.keydown;z(document).off("keydown.bs.dropdown.data-api").on("keydown.bs.dropdown.data-api",':not(.bootstrap-select) > [data-toggle="dropdown"]',Q).on("keydown.bs.dropdown.data-api",":not(.bootstrap-select) > .dropdown-menu",Q).on("keydown"+j,'.bootstrap-select [data-toggle="dropdown"], .bootstrap-select [role="listbox"], .bootstrap-select .bs-searchbox input',Y.prototype.keydown).on("focusin.modal",'.bootstrap-select [data-toggle="dropdown"], .bootstrap-select [role="listbox"], .bootstrap-select .bs-searchbox input',function(e){e.stopPropagation()}),z(window).on("load"+j+".data-api",function(){z(".selectpicker").each(function(){var e=z(this);Z.call(e,e.data())})})}(e)}); +//# sourceMappingURL=bootstrap-select.min.js.map \ No newline at end of file diff --git a/plexpy/graphs.py b/plexpy/graphs.py index 49dfee57..58a199c0 100644 --- a/plexpy/graphs.py +++ b/plexpy/graphs.py @@ -51,11 +51,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -171,11 +167,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = "AND session_history.user_id = %s " % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = "AND session_history.user_id = %s " % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -308,11 +300,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -427,11 +415,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 12 timestamp = 
arrow.get(helpers.timestamp()).shift(months=-time_range).floor('month').timestamp() - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -554,11 +538,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -653,11 +633,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -763,11 +739,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -860,11 +832,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -941,11 +909,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -1048,11 +1012,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != 
str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -1128,11 +1088,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -1212,3 +1168,16 @@ class Graphs(object): 'series': [series_1_output, series_2_output, series_3_output]} return output + + def _make_user_cond(self, user_id): + """ + Expects user_id to be a comma-separated list of ints. + """ + user_cond = '' + if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): + user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() + elif user_id: + user_ids = helpers.split_strip(user_id) + if all(id.isdigit() for id in user_ids): + user_cond = 'AND session_history.user_id IN (%s) ' % ','.join(user_ids) + return user_cond diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 06a9a802..7549aefa 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -2258,7 +2258,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2302,7 +2302,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2346,7 +2346,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2390,7 +2390,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of months of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2434,7 +2434,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2478,7 +2478,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2522,7 +2522,7 @@ class 
WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2565,7 +2565,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2608,7 +2608,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2651,7 +2651,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2694,7 +2694,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: From d91e561a56a14728970b3f581a3329a40cff0abf Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 7 Jul 2023 17:47:38 -0700 Subject: [PATCH 023/361] Regroup history in separate thread and improve logging --- data/interfaces/default/settings.html | 4 ++-- plexpy/activity_processor.py | 12 +++++++++++- plexpy/webserve.py | 8 +++----- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/data/interfaces/default/settings.html b/data/interfaces/default/settings.html index 7cd614e0..c5d8fe37 100644 --- a/data/interfaces/default/settings.html +++ b/data/interfaces/default/settings.html @@ -2498,9 +2498,9 @@ $(document).ready(function() { }); $("#regroup_history").click(function () { - var msg = 'Are you sure you want to regroup play history in the database?'; + var msg = 'Are you sure you want to regroup play history in the database?

This may take a long time for large databases.
Regrouping will continue in the background.
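The dialog text above promises that regrouping continues in the background; the patch that follows delivers this by running the regroup loop on a worker thread and logging progress once per completed 10% of rows instead of once per row. A minimal sketch of that pattern, using simplified placeholder names (regroup, sessions) rather than Tautulli's actual identifiers:

    import logging
    import threading

    logger = logging.getLogger("tautulli")

    def regroup(sessions):
        # Log once per completed decile rather than once per row.
        count = len(sessions)
        progress = 0
        for i, session in enumerate(sessions, start=1):
            if int(i / count * 10) > progress:
                progress = int(i / count * 10)
                logger.info("Regrouping session history: %d%%", progress * 10)
            # ... the per-session grouping work would run here ...

    # The web handler returns immediately; the loop keeps running on its own thread.
    threading.Thread(target=regroup, args=([{}] * 100,)).start()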
'; var url = 'regroup_history'; - confirmAjaxCall(url, msg, null, 'Regrouping play history...'); + confirmAjaxCall(url, msg); }); $("#delete_temp_sessions").click(function () { diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index b1558a56..0437d2d5 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -719,8 +719,14 @@ class ActivityProcessor(object): "JOIN session_history_metadata ON session_history.id = session_history_metadata.id" ) results = self.db.select(query) + count = len(results) + progress = 0 + + for i, session in enumerate(results, start=1): + if int(i / count * 10) > progress: + progress = int(i / count * 10) + logger.info("Tautulli ActivityProcessor :: Regrouping session history: %d%%", progress * 10) - for session in results: try: self.group_history(session['id'], session) except Exception as e: @@ -729,3 +735,7 @@ class ActivityProcessor(object): logger.info("Tautulli ActivityProcessor :: Regrouping session history complete.") return True + + +def regroup_history(): + ActivityProcessor().regroup_history() diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 7549aefa..b98c9e7c 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -443,12 +443,10 @@ class WebInterface(object): def regroup_history(self, **kwargs): """ Regroup play history in the database.""" - result = activity_processor.ActivityProcessor().regroup_history() + threading.Thread(target=activity_processor.regroup_history).start() - if result: - return {'result': 'success', 'message': 'Regrouped play history.'} - else: - return {'result': 'error', 'message': 'Regrouping play history failed.'} + return {'result': 'success', + 'message': 'Regrouping play history started. Check the logs to monitor any problems.'} @cherrypy.expose @cherrypy.tools.json_out() From 6010e406c817ecfb11d873824e0adcc22f5bcc80 Mon Sep 17 00:00:00 2001 From: David Pooley Date: Sun, 9 Jul 2023 00:32:42 +0100 Subject: [PATCH 024/361] Fix simultaneous streams per IP not behaving as expected with IPv6 (#2096) * Fix IPv6 comparisson for concurrent streams * Update regex to allow numbers in config variables * Remove additional logging for local testing * Update plexpy/notification_handler.py Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> --------- Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> --- plexpy/config.py | 3 ++- plexpy/helpers.py | 16 ++++++++++++++++ plexpy/notification_handler.py | 9 ++++++++- 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/plexpy/config.py b/plexpy/config.py index 7b583d8d..6f2926d9 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -177,6 +177,7 @@ _CONFIG_DEFINITIONS = { 'NOTIFY_RECENTLY_ADDED_UPGRADE': (int, 'Monitoring', 0), 'NOTIFY_REMOTE_ACCESS_THRESHOLD': (int, 'Monitoring', 60), 'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0), + 'NOTIFY_CONCURRENT_IPV6_CIDR': (str, 'Monitoring', '/64'), 'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2), 'NOTIFY_NEW_DEVICE_INITIAL_ONLY': (int, 'Monitoring', 1), 'NOTIFY_SERVER_CONNECTION_THRESHOLD': (int, 'Monitoring', 60), @@ -536,7 +537,7 @@ class Config(object): Returns something from the ini unless it is a real property of the configuration object or is not all caps. 
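This commit makes two separable fixes: the config attribute regex gains 0-9 so that setting names containing digits (such as the new NOTIFY_CONCURRENT_IPV6_CIDR) are routed to the ini lookup, and IPv6 addresses are collapsed to their /64 network address before concurrent streams are counted, so several devices on one home network register as a single IP. A stdlib-only sketch of both ideas (network_key is an illustrative name, not the project's helper):

    import re
    from ipaddress import ip_address, ip_network, IPv4Address

    # The old pattern rejected digits, so NOTIFY_CONCURRENT_IPV6_CIDR never matched:
    assert not re.match(r'[A-Z_]+$', 'NOTIFY_CONCURRENT_IPV6_CIDR')
    assert re.match(r'[A-Z0-9_]+$', 'NOTIFY_CONCURRENT_IPV6_CIDR')

    def network_key(ip, cidr='/64'):
        # IPv4 addresses are compared as-is; IPv6 addresses within the same
        # /64 (one typical home network) reduce to a single key.
        if isinstance(ip_address(ip), IPv4Address):
            return ip
        return str(ip_network(ip + cidr, strict=False).network_address)

    assert network_key('2001:db8::1') == network_key('2001:db8::2')  # same /64
    assert network_key('192.0.2.10') == '192.0.2.10'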
""" - if not re.match(r'[A-Z_]+$', name): + if not re.match(r'[A-Z0-9_]+$', name): return super(Config, self).__getattr__(name) else: return self.check_setting(name) diff --git a/plexpy/helpers.py b/plexpy/helpers.py index 9cfb9c45..085dfc12 100644 --- a/plexpy/helpers.py +++ b/plexpy/helpers.py @@ -33,6 +33,7 @@ from functools import reduce, wraps import hashlib import imghdr from future.moves.itertools import islice, zip_longest +from ipaddress import ip_address, ip_network, IPv4Address import ipwhois import ipwhois.exceptions import ipwhois.utils @@ -1777,3 +1778,18 @@ def check_watched(media_type, view_offset, duration, marker_credits_first=None, def pms_name(): return plexpy.CONFIG.PMS_NAME_OVERRIDE or plexpy.CONFIG.PMS_NAME + + +def ip_type(ip: str) -> str: + try: + return "IPv4" if type(ip_address(ip)) is IPv4Address else "IPv6" + except ValueError: + return "Invalid" + + +def get_ipv6_network_address(ip: str) -> str: + cidr = "/64" + cidr_pattern = re.compile(r'^/(1([0-1]\d|2[0-8]))$|^/(\d\d)$|^/[1-9]$') + if cidr_pattern.match(plexpy.CONFIG.NOTIFY_CONCURRENT_IPV6_CIDR): + cidr = plexpy.CONFIG.NOTIFY_CONCURRENT_IPV6_CIDR + return str(ip_network(ip+cidr, strict=False).network_address) diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 7dd81627..8b4b8583 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -160,6 +160,7 @@ def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, ti def notify_conditions(notify_action=None, stream_data=None, timeline_data=None, **kwargs): logger.debug("Tautulli NotificationHandler :: Checking global notification conditions.") + evaluated = False # Activity notifications if stream_data: @@ -187,7 +188,13 @@ def notify_conditions(notify_action=None, stream_data=None, timeline_data=None, user_sessions = [s for s in result['sessions'] if s['user_id'] == stream_data['user_id']] if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP: - evaluated = len(Counter(s['ip_address'] for s in user_sessions)) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD + ip_addresses = set() + for s in user_sessions: + if helpers.ip_type(s['ip_address']) == 'IPv6': + ip_addresses.add(helpers.get_ipv6_network_address(s['ip_address'])) + elif helpers.ip_type(s['ip_address']) == 'IPv4': + ip_addresses.add(s['ip_address']) + evaluated = len(ip_addresses) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD else: evaluated = len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD From 571a6b6d2df91d209907800af3f1b5c7356e2577 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 10 Jul 2023 08:56:27 -0700 Subject: [PATCH 025/361] Cast view_offset to int for regrouping history --- plexpy/activity_processor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 0437d2d5..9115f332 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -519,12 +519,12 @@ class ActivityProcessor(object): if len(result) > 1: new_session = {'id': result[0]['id'], 'rating_key': result[0]['rating_key'], - 'view_offset': result[0]['view_offset'], + 'view_offset': helpers.cast_to_int(result[0]['view_offset']), 'reference_id': result[0]['reference_id']} prev_session = {'id': result[1]['id'], 'rating_key': result[1]['rating_key'], - 'view_offset': result[1]['view_offset'], + 'view_offset': helpers.cast_to_int(result[1]['view_offset']), 'reference_id': result[1]['reference_id']} if metadata: From 
b953b951fb46400f7d30a22cdc2791faa8d6e233 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 13 Jul 2023 15:50:39 -0700 Subject: [PATCH 026/361] v2.12.5 --- CHANGELOG.md | 17 +++++++++++++++++ plexpy/version.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 24baf072..974e69ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## v2.12.5 (2023-07-13) + +* Activity: + * New: Added d3d11va to list of hardware decoders. +* History: + * Fix: Incorrect grouping of play history. + * New: Added button in settings to regroup play history. +* Notifications: + * Fix: Incorrect concurrent streams notifications by IP address for IPv6 addresses (#2096) (Thanks @pooley182) +* UI: + * Fix: Occasional UI crashing on Python 3.11. + * New: Added multiselect user filters to History and Graphs pages. (#2090) (Thanks @zdimension) +* API: + * New: Added regroup_history API command. + * Change: Updated graph API commands to accept a comma separated list of user IDs. + + ## v2.12.4 (2023-05-23) * History: diff --git a/plexpy/version.py b/plexpy/version.py index 119e0b07..116f4687 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.12.4" \ No newline at end of file +PLEXPY_RELEASE_VERSION = "v2.12.5" \ No newline at end of file From 765804c93b8304629b19140f1ed58b7dbace1c4e Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 20 Jul 2023 14:19:05 -0700 Subject: [PATCH 027/361] Don't expose do_state_change --- plexpy/webserve.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index b98c9e7c..b643f84b 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -4302,8 +4302,6 @@ class WebInterface(object): return update - @cherrypy.expose - @requireAuth(member_of("admin")) def do_state_change(self, signal, title, timer, **kwargs): message = title quote = self.random_arnold_quotes() From d701d18a813246eff84034205c6ced99f7c09c63 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 27 Jul 2023 20:04:03 -0700 Subject: [PATCH 028/361] Update workflows action version refs --- .github/workflows/publish-installers.yml | 8 ++++---- .github/workflows/publish-snap.yml | 2 +- .github/workflows/pull-requests.yml | 1 - 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/publish-installers.yml b/.github/workflows/publish-installers.yml index 49d53233..0b6eec36 100644 --- a/.github/workflows/publish-installers.yml +++ b/.github/workflows/publish-installers.yml @@ -68,7 +68,7 @@ jobs: pyinstaller -y ./package/Tautulli-${{ matrix.os }}.spec - name: Create Windows Installer - uses: joncloud/makensis-action@v3.7 + uses: joncloud/makensis-action@v4 if: matrix.os == 'windows' with: script-file: ./package/Tautulli.nsi @@ -100,10 +100,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Get Build Job Status - uses: technote-space/workflow-conclusion-action@v3.0 + uses: technote-space/workflow-conclusion-action@v3 - name: Checkout Code - uses: actions/checkout@v3.2.0 + uses: actions/checkout@v3 - name: Set Release Version id: get_version @@ -168,7 +168,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Get Build Job Status - uses: technote-space/workflow-conclusion-action@v3.0 + uses: technote-space/workflow-conclusion-action@v3 - name:
Combine Job Status id: status diff --git a/.github/workflows/publish-snap.yml b/.github/workflows/publish-snap.yml index 9df4d2fd..dd74c3a3 100644 --- a/.github/workflows/publish-snap.yml +++ b/.github/workflows/publish-snap.yml @@ -70,7 +70,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Get Build Job Status - uses: technote-space/workflow-conclusion-action@v3.0 + uses: technote-space/workflow-conclusion-action@v3 - name: Combine Job Status id: status diff --git a/.github/workflows/pull-requests.yml b/.github/workflows/pull-requests.yml index 58cb4ee4..1a24cf24 100644 --- a/.github/workflows/pull-requests.yml +++ b/.github/workflows/pull-requests.yml @@ -18,7 +18,6 @@ jobs: with: message: Pull requests must be made to the `nightly` branch. Thanks. repo-token: ${{ secrets.GITHUB_TOKEN }} - repo-token-user-login: 'github-actions[bot]' - name: Fail Workflow if: github.base_ref != 'nightly' From b984a99d512b32795031fd6ed3816afe11d516db Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 27 Jul 2023 20:04:27 -0700 Subject: [PATCH 029/361] Update workflows action version refs --- .github/workflows/publish-docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml index 6480575f..6d91bbf6 100644 --- a/.github/workflows/publish-docker.yml +++ b/.github/workflows/publish-docker.yml @@ -95,7 +95,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Get Build Job Status - uses: technote-space/workflow-conclusion-action@v3.0 + uses: technote-space/workflow-conclusion-action@v3 - name: Combine Job Status id: status From e2cb15ef49ed6df5493b35e95f51835829879446 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Wed, 2 Aug 2023 16:51:20 -0700 Subject: [PATCH 030/361] Add notification image option for iOS Tautulli Remote App --- plexpy/notifiers.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index a2fa6341..2611aaea 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -3967,6 +3967,14 @@ class TAUTULLIREMOTEAPP(Notifier): 2: 'Large image (Non-expandable text)' } }) + elif platform == 'ios': + config_option.append({ + 'label': 'Include Poster Image', + 'value': self.config['notification_type'], + 'name': 'remoteapp_notification_type', + 'description': 'Include a poster with the notifications.', + 'input_type': 'checkbox' + }) return config_option From 842e36485a6b4e718ad870cf0024c23bdc7d8d16 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Aug 2023 08:42:01 -0700 Subject: [PATCH 031/361] Bump actions/stale from 7 to 8 (#2025) Bumps [actions/stale](https://github.com/actions/stale) from 7 to 8. - [Release notes](https://github.com/actions/stale/releases) - [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/stale/compare/v7...v8) --- updated-dependencies: - dependency-name: actions/stale dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> [skip ci] --- .github/workflows/issues-stale.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/issues-stale.yml b/.github/workflows/issues-stale.yml index 0643cb0a..26b8aa5f 100644 --- a/.github/workflows/issues-stale.yml +++ b/.github/workflows/issues-stale.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Stale - uses: actions/stale@v7 + uses: actions/stale@v8 with: stale-issue-message: > This issue is stale because it has been open for 30 days with no activity. @@ -30,7 +30,7 @@ jobs: days-before-close: 5 - name: Invalid Template - uses: actions/stale@v7 + uses: actions/stale@v8 with: stale-issue-message: > Invalid issues template. From 31543d267f4f793689be599fa66a1ce17879bab6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:19:39 -0700 Subject: [PATCH 032/361] Bump pywin32 from 305 to 306 (#2028) Bumps [pywin32](https://github.com/mhammond/pywin32) from 305 to 306. - [Release notes](https://github.com/mhammond/pywin32/releases) - [Changelog](https://github.com/mhammond/pywin32/blob/main/CHANGES.txt) - [Commits](https://github.com/mhammond/pywin32/commits) --- updated-dependencies: - dependency-name: pywin32 dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> [skip ci] --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index e8ccd0b8..d3c6d57b 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -8,4 +8,4 @@ pycryptodomex==3.17 pyobjc-core==9.0.1; platform_system == "Darwin" pyobjc-framework-Cocoa==9.0.1; platform_system == "Darwin" -pywin32==305; platform_system == "Windows" +pywin32==306; platform_system == "Windows" From 6fa8bb376828cdb954ecf08eda804d70747e7502 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:20:05 -0700 Subject: [PATCH 033/361] Bump pyopenssl from 23.0.0 to 23.2.0 (#2081) Bumps [pyopenssl](https://github.com/pyca/pyopenssl) from 23.0.0 to 23.2.0. - [Changelog](https://github.com/pyca/pyopenssl/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/pyopenssl/compare/23.0.0...23.2.0) --- updated-dependencies: - dependency-name: pyopenssl dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> [skip ci] --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index d3c6d57b..f432c59b 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -2,7 +2,7 @@ apscheduler==3.10.1 importlib-metadata==6.0.0 importlib-resources==5.12.0 pyinstaller==5.8.0 -pyopenssl==23.0.0 +pyopenssl==23.2.0 pycryptodomex==3.17 pyobjc-core==9.0.1; platform_system == "Darwin" From b7c0b104e94b9ea77a6230192d67c22d0ab08325 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:20:54 -0700 Subject: [PATCH 034/361] Bump pycryptodomex from 3.17 to 3.18.0 (#2076) Bumps [pycryptodomex](https://github.com/Legrandin/pycryptodome) from 3.17 to 3.18.0. - [Release notes](https://github.com/Legrandin/pycryptodome/releases) - [Changelog](https://github.com/Legrandin/pycryptodome/blob/master/Changelog.rst) - [Commits](https://github.com/Legrandin/pycryptodome/compare/v3.17.0...v3.18.0) --- updated-dependencies: - dependency-name: pycryptodomex dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index f432c59b..ee3cc800 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -3,7 +3,7 @@ importlib-metadata==6.0.0 importlib-resources==5.12.0 pyinstaller==5.8.0 pyopenssl==23.2.0 -pycryptodomex==3.17 +pycryptodomex==3.18.0 pyobjc-core==9.0.1; platform_system == "Darwin" pyobjc-framework-Cocoa==9.0.1; platform_system == "Darwin" From a84b5b51ed23354cff8903b0dab038fa384b6a32 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:21:16 -0700 Subject: [PATCH 035/361] Bump pyobjc-core from 9.0.1 to 9.2 (#2082) Bumps [pyobjc-core](https://github.com/ronaldoussoren/pyobjc) from 9.0.1 to 9.2. - [Release notes](https://github.com/ronaldoussoren/pyobjc/releases) - [Changelog](https://github.com/ronaldoussoren/pyobjc/blob/master/docs/changelog.rst) - [Commits](https://github.com/ronaldoussoren/pyobjc/compare/v9.0.1...v9.2) --- updated-dependencies: - dependency-name: pyobjc-core dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> [skip ci] --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index ee3cc800..70cd4cb0 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -5,7 +5,7 @@ pyinstaller==5.8.0 pyopenssl==23.2.0 pycryptodomex==3.18.0 -pyobjc-core==9.0.1; platform_system == "Darwin" +pyobjc-core==9.2; platform_system == "Darwin" pyobjc-framework-Cocoa==9.0.1; platform_system == "Darwin" pywin32==306; platform_system == "Windows" From e11a4c50bafacd0dad8ef192a6b9533a033d883b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:21:55 -0700 Subject: [PATCH 036/361] Bump pyobjc-framework-cocoa from 9.0.1 to 9.2 (#2083) Bumps [pyobjc-framework-cocoa](https://github.com/ronaldoussoren/pyobjc) from 9.0.1 to 9.2. - [Release notes](https://github.com/ronaldoussoren/pyobjc/releases) - [Changelog](https://github.com/ronaldoussoren/pyobjc/blob/master/docs/changelog.rst) - [Commits](https://github.com/ronaldoussoren/pyobjc/compare/v9.0.1...v9.2) --- updated-dependencies: - dependency-name: pyobjc-framework-cocoa dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index 70cd4cb0..064d2246 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -6,6 +6,6 @@ pyopenssl==23.2.0 pycryptodomex==3.18.0 pyobjc-core==9.2; platform_system == "Darwin" -pyobjc-framework-Cocoa==9.0.1; platform_system == "Darwin" +pyobjc-framework-Cocoa==9.2; platform_system == "Darwin" pywin32==306; platform_system == "Windows" From f80cd739825051224a48ff4288cc9b148d32b7cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:22:24 -0700 Subject: [PATCH 037/361] Bump importlib-metadata from 6.0.0 to 6.8.0 (#2112) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 6.0.0 to 6.8.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/NEWS.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v6.0.0...v6.8.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> [skip ci] --- package/requirements-package.txt | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index 064d2246..075aa916 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -1,5 +1,5 @@ apscheduler==3.10.1 -importlib-metadata==6.0.0 +importlib-metadata==6.8.0 importlib-resources==5.12.0 pyinstaller==5.8.0 pyopenssl==23.2.0 diff --git a/requirements.txt b/requirements.txt index 49c32ff6..ee495cb7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,7 +19,7 @@ gntp==1.0.3 html5lib==1.1 httpagentparser==1.9.5 idna==3.4 -importlib-metadata==6.0.0 +importlib-metadata==6.8.0 importlib-resources==5.12.0 git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois IPy==1.01 From a21fffd227617d17f3c184965797c0a864df433a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:23:35 -0700 Subject: [PATCH 038/361] Bump pyinstaller from 5.8.0 to 5.13.0 (#2114) Bumps [pyinstaller](https://github.com/pyinstaller/pyinstaller) from 5.8.0 to 5.13.0. - [Release notes](https://github.com/pyinstaller/pyinstaller/releases) - [Changelog](https://github.com/pyinstaller/pyinstaller/blob/develop/doc/CHANGES.rst) - [Commits](https://github.com/pyinstaller/pyinstaller/compare/v5.8.0...v5.13.0) --- updated-dependencies: - dependency-name: pyinstaller dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index 075aa916..5847d597 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -1,7 +1,7 @@ apscheduler==3.10.1 importlib-metadata==6.8.0 importlib-resources==5.12.0 -pyinstaller==5.8.0 +pyinstaller==5.13.0 pyopenssl==23.2.0 pycryptodomex==3.18.0 From 835ea34bea88fa9f2fb31d26e210b75e42952b80 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:38:17 -0700 Subject: [PATCH 039/361] Bump pytz from 2022.7.1 to 2023.3 (#2031) * Bump pytz from 2022.7.1 to 2023.3 Bumps [pytz](https://github.com/stub42/pytz) from 2022.7.1 to 2023.3. - [Release notes](https://github.com/stub42/pytz/releases) - [Commits](https://github.com/stub42/pytz/compare/release_2022.7.1...release_2023.3) --- updated-dependencies: - dependency-name: pytz dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] * Update pytz==2023.3 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/pytz/__init__.py | 5 +- lib/pytz/zoneinfo/Africa/Cairo | Bin 1955 -> 2399 bytes lib/pytz/zoneinfo/Africa/Casablanca | Bin 2429 -> 2429 bytes lib/pytz/zoneinfo/Africa/El_Aaiun | Bin 2295 -> 2295 bytes lib/pytz/zoneinfo/America/Godthab | Bin 1448 -> 1903 bytes lib/pytz/zoneinfo/America/Nuuk | Bin 1448 -> 1903 bytes lib/pytz/zoneinfo/America/Yellowknife | Bin 2136 -> 2332 bytes lib/pytz/zoneinfo/Asia/Gaza | Bin 2422 -> 3808 bytes lib/pytz/zoneinfo/Asia/Hebron | Bin 2450 -> 3836 bytes lib/pytz/zoneinfo/Egypt | Bin 1955 -> 2399 bytes lib/pytz/zoneinfo/Europe/Kirov | Bin 1139 -> 1185 bytes lib/pytz/zoneinfo/Europe/Volgograd | Bin 1151 -> 1193 bytes lib/pytz/zoneinfo/iso3166.tab | 2 +- lib/pytz/zoneinfo/leapseconds | 8 +- lib/pytz/zoneinfo/tzdata.zi | 130 ++++++++++++++++++++------ lib/pytz/zoneinfo/zone.tab | 29 +++--- lib/pytz/zoneinfo/zone1970.tab | 58 ++++++------ requirements.txt | 2 +- 18 files changed, 156 insertions(+), 78 deletions(-) diff --git a/lib/pytz/__init__.py b/lib/pytz/__init__.py index 11d2e2ca..98b66553 100644 --- a/lib/pytz/__init__.py +++ b/lib/pytz/__init__.py @@ -22,8 +22,8 @@ from pytz.tzfile import build_tzinfo # The IANA (nee Olson) database is updated several times a year. -OLSON_VERSION = '2022g' -VERSION = '2022.7.1' # pip compatible version number. +OLSON_VERSION = '2023c' +VERSION = '2023.3' # pip compatible version number. __version__ = VERSION OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling @@ -1311,7 +1311,6 @@ common_timezones = \ 'America/Whitehorse', 'America/Winnipeg', 'America/Yakutat', - 'America/Yellowknife', 'Antarctica/Casey', 'Antarctica/Davis', 'Antarctica/DumontDUrville', diff --git a/lib/pytz/zoneinfo/Africa/Cairo b/lib/pytz/zoneinfo/Africa/Cairo index d3f819623fc9ef90d327380fad15341ec1a0e202..dd538c65db6ed0a0e47feb7b6001640516958e19 100644 GIT binary patch delta 474 zcmZ9|ze@sP9LMqQ5()`|)}#g_C`?V1NTPf2+OCzEiJOxf*im=0fa4E=C(-Y4lMpd*9WH zyDl@G4=VF|E>~Y_YOQf?XYbC`daZ14REuiUD%e{m^J+Vnws*1xkxNGHd^|07r{-kA z7ZpX%q%8G#M5)y;_ujt6{?nH{xVOdOO;aA#Tk81YR+{B!rEBfqn$Tm`jNWY}x`v(i zfOVW7BY-v%9jZHBmPJr=u3^jBzuE+ezcM1{!)jJC}8WEX5MVEn`c7D#2O1qn=k3Z%47&TP?GTKadZk@7@9O*@5aZfP8je=4qA+lTA5vnFYQoY)<8nU<3g6DJ6&i delta 105 zcmew>^jBzuE+ez==>?Mw7;TxKM<{GAVEn`c7D#2O1qn=k3ZxsJESQ|iTFWd|2;>9l u&3}P>wpwQ2E+8LBKNkb?*@5a71NrQ}%=3#EOg81vWtP%kusM}Of)N0ak|(nO diff --git a/lib/pytz/zoneinfo/Africa/El_Aaiun b/lib/pytz/zoneinfo/Africa/El_Aaiun index 64f1b7694418a8c284febe195659c4dd53359b1a..066fbed008cf662455eeca2c012ab8cb5bf1731a 100644 GIT binary patch delta 105 zcmew^_+4-VBO|lRM1{#LjJC}8WEX69VVubX7I?s13lf-|z*5UxzDi;810XGE2IRBW vGOs!e?Nn7;TxKM<{G|VVubX7I?s13lf-|z*5WH@MOW{2S8e?5Xfh( uW#0T3$OqECT|hn?P@Nc%52PCw1NrQ}%=3#EOy0n*%Pgh8VDkfZaYg`5l_!q? diff --git a/lib/pytz/zoneinfo/America/Godthab b/lib/pytz/zoneinfo/America/Godthab index 883dfa055532c8319a833d80c82707b3ca5d4d5a..adb7934aadf5f45928ce67e82efa92ca92391703 100644 GIT binary patch delta 653 zcmb8sJ4nMo9LMooeNv^IgE(m?x3oEdD^)>o@7jjwFtB!9>t9o(dJ zDvCI{h=@3etAmR!PFg2%koy0nZX(oNF5k-yf%~K}^yX8|*?h)Oeq95+gNdum6Ki>@ z&I@}X)}+kfIc3L*vM+~}`zlfX)}qBnlM0PFTDmF9<^33~)YEb`=cl#3h%C-TWhpZz z*Yydxk@%G55k*!)qf}`<$*S*zs`uB@dVQeHle*lxyP)mb740;SXxBO>`@kmA>*@I! 
zPEAbt`K7P_@8LX)$?10cy(NZ|DN_k(2G1~0olhYI?oR%5__L(v(qr=n6bPVDKq29l zHBgA4P(dMsLI;Hq3MCX$D6~+Bp-@91hq7M5U4Rf9U_?RbmN`+7x@A@rv?zE{5Tjs5 qL5+eN?T|Yp(%ncWx{>xq`47ay{M6%W$_Pe+S}35aDP41yn)e%>`jLMC delta 176 zcmaFQw}M+IBq}q_hyeqz0YyL<1R{VK#OB$klfXE6B3t-mR(4evAKwrLT?1nv25Dzx c1QKA96azQEWIDhEGf@GTiCi{7JM4_O00VIpA^-pY diff --git a/lib/pytz/zoneinfo/America/Nuuk b/lib/pytz/zoneinfo/America/Nuuk index 883dfa055532c8319a833d80c82707b3ca5d4d5a..adb7934aadf5f45928ce67e82efa92ca92391703 100644 GIT binary patch delta 653 zcmb8sJ4nMo9LMooeNv^IgE(m?x3oEdD^)>o@7jjwFtB!9>t9o(dJ zDvCI{h=@3etAmR!PFg2%koy0nZX(oNF5k-yf%~K}^yX8|*?h)Oeq95+gNdum6Ki>@ z&I@}X)}+kfIc3L*vM+~}`zlfX)}qBnlM0PFTDmF9<^33~)YEb`=cl#3h%C-TWhpZz z*Yydxk@%G55k*!)qf}`<$*S*zs`uB@dVQeHle*lxyP)mb740;SXxBO>`@kmA>*@I! zPEAbt`K7P_@8LX)$?10cy(NZ|DN_k(2G1~0olhYI?oR%5__L(v(qr=n6bPVDKq29l zHBgA4P(dMsLI;Hq3MCX$D6~+Bp-@91hq7M5U4Rf9U_?RbmN`+7x@A@rv?zE{5Tjs5 qL5+eN?T|Yp(%ncWx{>xq`47ay{M6%W$_Pe+S}35aDP41yn)e%>`jLMC delta 176 zcmaFQw}M+IBq}q_hyeqz0YyL<1R{VK#OB$klfXE6B3t-mR(4evAKwrLT?1nv25Dzx c1QKA96azQEWIDhEGf@GTiCi{7JM4_O00VIpA^-pY diff --git a/lib/pytz/zoneinfo/America/Yellowknife b/lib/pytz/zoneinfo/America/Yellowknife index efc9ab037fea7aa699c70b59d011150043c51cd9..cd78a6f8be1dd55ac5afd25bbae39bd5706e42d1 100644 GIT binary patch delta 680 zcmca1Fh^*Da=nF_L&EBwJ#>5E=F10o+>UuF@aSI#6lB`n` zmYN=JSQh+TV7chhh84yq1y=k!#BjQkXTl}rF9nwpB^a)TJ{Gv9aA%{nAX7aPBNAk0 zLE<1w(2O07!@~dnr!HV%`2ThWX}bW<{3a80eE7I3xgGqsjdM delta 503 zcmbOubVFc*vP1_10|P4%i|o^qU^v~$GvSi*mx4=)5*v#Jnd+GtnV4Ay4!Xjv209Q3 z{{LUSfRP1Ea{T{4cLDU-o0K~yS>=FW0%?Q#0Bqk_pz&-gN2tXbM y(IC%)Xpo1&bfUy&K^8@pf%bJQRea3_3lx`8==d5M>KW=8Z~*|6j!`oJ diff --git a/lib/pytz/zoneinfo/Asia/Gaza b/lib/pytz/zoneinfo/Asia/Gaza index 22c47593d4698743bc60ad98e60e45e3c48da7b1..c9b2ff90823a5809d877fb68c10494a38ab183b1 100644 GIT binary patch delta 1402 zcmYMycT5vu7zXf^N6 zDp}*!%t9tjl|$qsqU5RAc}JkQ_?|`w?V1hA#V2$iqf8Al&7U{orQkfn%z_rSezjRh4_78D8%=rppf9EgcDAM zBTw8IM++>m!k4>Wi~M3xAT057nWW3-uAYc~(h46qS>+EW@AlWyO>&IR{##%yWyUTH zQU;cwkh=IM7E<@m6=|S%T#j7-a6Ixfra_+OZQ*`98F_lqVvz>v){$_=(ACH@8vnqV z7hREO=A7W}ekaoBvrAX|FffFDL&0!@KO=^s1RRhRRgXMd_zhn^{6eOiD*-toy&JdG z!8y}L!?_ELaPF_W6tioibnrD>2AuaP1ioH*5}(iVyCSue6r}G(;YNi9EU%erqfiywNPDb0@)h!$&^-9Q|5mtma|~QJ^E>)=J7$YCF#X9wUO%rL`GXez z2Q~~jhP=^OjDBOA(2s#x<&6ci#Sd-@^F*O3k3V!x18&G4mdr=~Xnhm>Nburr_d>rp z$pwDwaTtE=cZIuX0^D-+3*7R)2Yw=`H!-@NB=U!@bzS397^ZDr%zTzv8K{o|2tay4G)O-PfXo2XAaf?OZxsfb1LA|s0s$oRS^=Ow BCfNW0 diff --git a/lib/pytz/zoneinfo/Asia/Hebron b/lib/pytz/zoneinfo/Asia/Hebron index 0ee464803029ae4885814acae9e3291771401e62..64194fd85c676a5407878a197f6f9a011f2ff067 100644 GIT binary patch delta 1395 zcmX}sc}SCS7zgm5n=pFjLpXd91-YI)oy7R=Go31kF;0Sgo zaE+vL?_rU%=Sh*$QBk`OB| z+nh`bEUht!drXZyJt7R&^7}Harh>b57Wx^h0^v+W2%H%nqNa{|DD~o8TK^ zO5`_V7Qt2PSBo^L4xIwmD81lX|BrB;oPUJ%UXBv=>R-M=-pD?{jYgAzL6ao|3(b}M zslIhR0EL#MPTFIwaj%fKMU}(1L$u3%;Y@zI%~K1LL0p604ZN6{l#=nwo1_vb9; zHbsau7}(JY56&J$KB&@h>nFqJLq7EJkTBD{5VJPYt29G^jFnIhl8@4zaV9PoW zkp`AudU&|yIP&4qof2I0isfRS2jb#0QxH0zh*ncQgH)e1}f~0Lm;Q AHvj+t diff --git a/lib/pytz/zoneinfo/Egypt b/lib/pytz/zoneinfo/Egypt index d3f819623fc9ef90d327380fad15341ec1a0e202..dd538c65db6ed0a0e47feb7b6001640516958e19 100644 GIT binary patch delta 474 zcmZ9|ze@sP9LMqQ5()`|)}#g_C`?V1NTPf2+OCzEiJOxf*im=0fa4E=C(-Y4lMpd*9WH zyDl@G4=VF|E>~Y_YOQf?XYbC`daZ14REuiUD%e{m^J+Vnws*1xkxNGHd^|07r{-kA z7ZpX%q%8G#M5)y;_ujt6{?nH{xVOdOO;aA#Tk81YR+{B!rEBfqn$Tm`jNWY}x`v(i zfOVW7BY-v%9jZHBmPJr=u39=HHB_Oc0|Ch%}1J256C;t}zz?p*aw~ diff --git a/lib/pytz/zoneinfo/Europe/Volgograd b/lib/pytz/zoneinfo/Europe/Volgograd 
index 5539bac6ced7d584136fc705e8dcb80b300ad6b4..9d51a38c0ea719c5042837d47e0f427057b845f0 100644 GIT binary patch delta 180 zcmey*v66Fwv>+D)1ULdI5L;rSjw2%@$K*Ul9a8}Y1}y^y5e5be2M`Hj`}l@1Xd4&< nu?Y~HGWZ6&0I@gN3`Q`(X6ELnjIK-&Lv6@1lnZEwt}zz?S>6{H delta 138 zcmZ3<`JZEgv>*oq1ULdI5L;lQjw2%@%Vb9u9eE&M%YcE!$2WvQ+rSuzO@P=Gtez1F Zu<73Xo6(gCqFsZ4b}k#Bp?12)TmZ?I5as{? diff --git a/lib/pytz/zoneinfo/iso3166.tab b/lib/pytz/zoneinfo/iso3166.tab index 911af5e8..be3348d1 100644 --- a/lib/pytz/zoneinfo/iso3166.tab +++ b/lib/pytz/zoneinfo/iso3166.tab @@ -238,7 +238,7 @@ SY Syria SZ Eswatini (Swaziland) TC Turks & Caicos Is TD Chad -TF French Southern Territories +TF French S. Terr. TG Togo TH Thailand TJ Tajikistan diff --git a/lib/pytz/zoneinfo/leapseconds b/lib/pytz/zoneinfo/leapseconds index 6826ac4a..a6a170aa 100644 --- a/lib/pytz/zoneinfo/leapseconds +++ b/lib/pytz/zoneinfo/leapseconds @@ -72,11 +72,11 @@ Leap 2016 Dec 31 23:59:60 + S # Any additional leap seconds will come after this. # This Expires line is commented out for now, # so that pre-2020a zic implementations do not reject this file. -#Expires 2023 Jun 28 00:00:00 +#Expires 2023 Dec 28 00:00:00 # POSIX timestamps for the data in this file: #updated 1467936000 (2016-07-08 00:00:00 UTC) -#expires 1687910400 (2023-06-28 00:00:00 UTC) +#expires 1703721600 (2023-12-28 00:00:00 UTC) -# Updated through IERS Bulletin C64 -# File expires on: 28 June 2023 +# Updated through IERS Bulletin C65 +# File expires on: 28 December 2023 diff --git a/lib/pytz/zoneinfo/tzdata.zi b/lib/pytz/zoneinfo/tzdata.zi index 7c88530c..23d99be4 100644 --- a/lib/pytz/zoneinfo/tzdata.zi +++ b/lib/pytz/zoneinfo/tzdata.zi @@ -75,6 +75,8 @@ R K 2014 o - May 15 24 1 S R K 2014 o - Jun 26 24 0 - R K 2014 o - Jul 31 24 1 S R K 2014 o - S lastTh 24 0 - +R K 2023 ma - Ap lastF 0 1 S +R K 2023 ma - O lastTh 24 0 - Z Africa/Cairo 2:5:9 - LMT 1900 O 2 K EE%sT Z Africa/Bissau -1:2:20 - LMT 1912 Ja 1 1u @@ -172,7 +174,7 @@ R M 2021 o - May 16 2 0 - R M 2022 o - Mar 27 3 -1 - R M 2022 o - May 8 2 0 - R M 2023 o - Mar 19 3 -1 - -R M 2023 o - Ap 30 2 0 - +R M 2023 o - Ap 23 2 0 - R M 2024 o - Mar 10 3 -1 - R M 2024 o - Ap 14 2 0 - R M 2025 o - F 23 3 -1 - @@ -188,7 +190,7 @@ R M 2029 o - F 18 2 0 - R M 2029 o - D 30 3 -1 - R M 2030 o - F 10 2 0 - R M 2030 o - D 22 3 -1 - -R M 2031 o - F 2 2 0 - +R M 2031 o - Ja 26 2 0 - R M 2031 o - D 14 3 -1 - R M 2032 o - Ja 18 2 0 - R M 2032 o - N 28 3 -1 - @@ -204,7 +206,7 @@ R M 2036 o - N 23 2 0 - R M 2037 o - O 4 3 -1 - R M 2037 o - N 15 2 0 - R M 2038 o - S 26 3 -1 - -R M 2038 o - N 7 2 0 - +R M 2038 o - O 31 2 0 - R M 2039 o - S 18 3 -1 - R M 2039 o - O 23 2 0 - R M 2040 o - S 2 3 -1 - @@ -220,7 +222,7 @@ R M 2044 o - Au 28 2 0 - R M 2045 o - Jul 9 3 -1 - R M 2045 o - Au 20 2 0 - R M 2046 o - Jul 1 3 -1 - -R M 2046 o - Au 12 2 0 - +R M 2046 o - Au 5 2 0 - R M 2047 o - Jun 23 3 -1 - R M 2047 o - Jul 28 2 0 - R M 2048 o - Jun 7 3 -1 - @@ -236,7 +238,7 @@ R M 2052 o - Jun 2 2 0 - R M 2053 o - Ap 13 3 -1 - R M 2053 o - May 25 2 0 - R M 2054 o - Ap 5 3 -1 - -R M 2054 o - May 17 2 0 - +R M 2054 o - May 10 2 0 - R M 2055 o - Mar 28 3 -1 - R M 2055 o - May 2 2 0 - R M 2056 o - Mar 12 3 -1 - @@ -252,7 +254,7 @@ R M 2060 o - Mar 7 2 0 - R M 2061 o - Ja 16 3 -1 - R M 2061 o - F 27 2 0 - R M 2062 o - Ja 8 3 -1 - -R M 2062 o - F 19 2 0 - +R M 2062 o - F 12 2 0 - R M 2062 o - D 31 3 -1 - R M 2063 o - F 4 2 0 - R M 2063 o - D 16 3 -1 - @@ -268,7 +270,7 @@ R M 2067 o - D 11 2 0 - R M 2068 o - O 21 3 -1 - R M 2068 o - D 2 2 0 - R M 2069 o - O 13 3 -1 - -R M 2069 o - N 
24 2 0 - +R M 2069 o - N 17 2 0 - R M 2070 o - O 5 3 -1 - R M 2070 o - N 9 2 0 - R M 2071 o - S 20 3 -1 - @@ -284,7 +286,7 @@ R M 2075 o - S 15 2 0 - R M 2076 o - Jul 26 3 -1 - R M 2076 o - S 6 2 0 - R M 2077 o - Jul 18 3 -1 - -R M 2077 o - Au 29 2 0 - +R M 2077 o - Au 22 2 0 - R M 2078 o - Jul 10 3 -1 - R M 2078 o - Au 14 2 0 - R M 2079 o - Jun 25 3 -1 - @@ -294,13 +296,13 @@ R M 2080 o - Jul 21 2 0 - R M 2081 o - Jun 1 3 -1 - R M 2081 o - Jul 13 2 0 - R M 2082 o - May 24 3 -1 - -R M 2082 o - Jul 5 2 0 - +R M 2082 o - Jun 28 2 0 - R M 2083 o - May 16 3 -1 - R M 2083 o - Jun 20 2 0 - R M 2084 o - Ap 30 3 -1 - R M 2084 o - Jun 11 2 0 - R M 2085 o - Ap 22 3 -1 - -R M 2085 o - Jun 3 2 0 - +R M 2085 o - May 27 2 0 - R M 2086 o - Ap 14 3 -1 - R M 2086 o - May 19 2 0 - R M 2087 o - Mar 30 3 -1 - @@ -997,8 +999,86 @@ R P 2020 2021 - Mar Sa<=30 0 1 S R P 2020 o - O 24 1 0 - R P 2021 o - O 29 1 0 - R P 2022 o - Mar 27 0 1 S -R P 2022 ma - O Sa<=30 2 0 - -R P 2023 ma - Mar Sa<=30 2 1 S +R P 2022 2035 - O Sa<=30 2 0 - +R P 2023 o - Ap 29 2 1 S +R P 2024 o - Ap 13 2 1 S +R P 2025 o - Ap 5 2 1 S +R P 2026 2054 - Mar Sa<=30 2 1 S +R P 2036 o - O 18 2 0 - +R P 2037 o - O 10 2 0 - +R P 2038 o - S 25 2 0 - +R P 2039 o - S 17 2 0 - +R P 2039 o - O 22 2 1 S +R P 2039 2067 - O Sa<=30 2 0 - +R P 2040 o - S 1 2 0 - +R P 2040 o - O 13 2 1 S +R P 2041 o - Au 24 2 0 - +R P 2041 o - S 28 2 1 S +R P 2042 o - Au 16 2 0 - +R P 2042 o - S 20 2 1 S +R P 2043 o - Au 1 2 0 - +R P 2043 o - S 12 2 1 S +R P 2044 o - Jul 23 2 0 - +R P 2044 o - Au 27 2 1 S +R P 2045 o - Jul 15 2 0 - +R P 2045 o - Au 19 2 1 S +R P 2046 o - Jun 30 2 0 - +R P 2046 o - Au 11 2 1 S +R P 2047 o - Jun 22 2 0 - +R P 2047 o - Jul 27 2 1 S +R P 2048 o - Jun 6 2 0 - +R P 2048 o - Jul 18 2 1 S +R P 2049 o - May 29 2 0 - +R P 2049 o - Jul 3 2 1 S +R P 2050 o - May 21 2 0 - +R P 2050 o - Jun 25 2 1 S +R P 2051 o - May 6 2 0 - +R P 2051 o - Jun 17 2 1 S +R P 2052 o - Ap 27 2 0 - +R P 2052 o - Jun 1 2 1 S +R P 2053 o - Ap 12 2 0 - +R P 2053 o - May 24 2 1 S +R P 2054 o - Ap 4 2 0 - +R P 2054 o - May 16 2 1 S +R P 2055 o - May 1 2 1 S +R P 2056 o - Ap 22 2 1 S +R P 2057 o - Ap 7 2 1 S +R P 2058 ma - Mar Sa<=30 2 1 S +R P 2068 o - O 20 2 0 - +R P 2069 o - O 12 2 0 - +R P 2070 o - O 4 2 0 - +R P 2071 o - S 19 2 0 - +R P 2072 o - S 10 2 0 - +R P 2072 o - O 15 2 1 S +R P 2073 o - S 2 2 0 - +R P 2073 o - O 7 2 1 S +R P 2074 o - Au 18 2 0 - +R P 2074 o - S 29 2 1 S +R P 2075 o - Au 10 2 0 - +R P 2075 o - S 14 2 1 S +R P 2075 ma - O Sa<=30 2 0 - +R P 2076 o - Jul 25 2 0 - +R P 2076 o - S 5 2 1 S +R P 2077 o - Jul 17 2 0 - +R P 2077 o - Au 28 2 1 S +R P 2078 o - Jul 9 2 0 - +R P 2078 o - Au 13 2 1 S +R P 2079 o - Jun 24 2 0 - +R P 2079 o - Au 5 2 1 S +R P 2080 o - Jun 15 2 0 - +R P 2080 o - Jul 20 2 1 S +R P 2081 o - Jun 7 2 0 - +R P 2081 o - Jul 12 2 1 S +R P 2082 o - May 23 2 0 - +R P 2082 o - Jul 4 2 1 S +R P 2083 o - May 15 2 0 - +R P 2083 o - Jun 19 2 1 S +R P 2084 o - Ap 29 2 0 - +R P 2084 o - Jun 10 2 1 S +R P 2085 o - Ap 21 2 0 - +R P 2085 o - Jun 2 2 1 S +R P 2086 o - Ap 13 2 0 - +R P 2086 o - May 18 2 1 S Z Asia/Gaza 2:17:52 - LMT 1900 O 2 Z EET/EEST 1948 May 15 2 K EE%sT 1967 Jun 5 @@ -1754,8 +1834,8 @@ Z America/Scoresbysund -1:27:52 - LMT 1916 Jul 28 -1 E -01/+00 Z America/Nuuk -3:26:56 - LMT 1916 Jul 28 -3 - -03 1980 Ap 6 2 --3 E -03/-02 2023 Mar 25 22 --2 - -02 +-3 E -03/-02 2023 O 29 1u +-2 E -02/-01 Z America/Thule -4:35:8 - LMT 1916 Jul 28 -4 Th A%sT Z Europe/Tallinn 1:39 - LMT 1880 @@ -2175,13 +2255,13 @@ Z Europe/Volgograd 2:57:40 - LMT 1920 Ja 3 3 
- +03 1930 Jun 21 4 - +04 1961 N 11 4 R +04/+05 1988 Mar 27 2s -3 R +03/+04 1991 Mar 31 2s +3 R MSK/MSD 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s -3 R +03/+04 2011 Mar 27 2s -4 - +04 2014 O 26 2s -3 - +03 2018 O 28 2s +3 R MSK/MSD 2011 Mar 27 2s +4 - MSK 2014 O 26 2s +3 - MSK 2018 O 28 2s 4 - +04 2020 D 27 2s -3 - +03 +3 - MSK Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1988 Mar 27 2s @@ -2194,11 +2274,11 @@ Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u Z Europe/Kirov 3:18:48 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1989 Mar 26 2s -3 R +03/+04 1991 Mar 31 2s +3 R MSK/MSD 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s -3 R +03/+04 2011 Mar 27 2s -4 - +04 2014 O 26 2s -3 - +03 +3 R MSK/MSD 2011 Mar 27 2s +4 - MSK 2014 O 26 2s +3 - MSK Z Europe/Samara 3:20:20 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 - +04 1935 Ja 27 @@ -3070,9 +3150,6 @@ Z America/Cambridge_Bay 0 - -00 1920 -5 - EST 2000 N 5 -6 - CST 2001 Ap 1 3 -7 C M%sT -Z America/Yellowknife 0 - -00 1935 --7 Y M%sT 1980 --7 C M%sT Z America/Inuvik 0 - -00 1953 -8 Y P%sT 1979 Ap lastSu 2 -7 Y M%sT 1980 @@ -4171,6 +4248,7 @@ L America/Argentina/Cordoba America/Rosario L America/Tijuana America/Santa_Isabel L America/Denver America/Shiprock L America/Toronto America/Thunder_Bay +L America/Edmonton America/Yellowknife L Pacific/Auckland Antarctica/South_Pole L Asia/Shanghai Asia/Chongqing L Asia/Shanghai Asia/Harbin diff --git a/lib/pytz/zoneinfo/zone.tab b/lib/pytz/zoneinfo/zone.tab index 6e5adb9f..dbcb6179 100644 --- a/lib/pytz/zoneinfo/zone.tab +++ b/lib/pytz/zoneinfo/zone.tab @@ -121,9 +121,8 @@ CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +5024-10439 America/Regina CST - SK (most areas) CA +5017-10750 America/Swift_Current CST - SK (midwest) -CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) +CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) -CA +6227-11421 America/Yellowknife Mountain - NT (central) CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +4906-11631 America/Creston MST - BC (Creston) CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) @@ -139,7 +138,7 @@ CG -0416+01517 Africa/Brazzaville CH +4723+00832 Europe/Zurich CI +0519-00402 Africa/Abidjan CK -2114-15946 Pacific/Rarotonga -CL -3327-07040 America/Santiago Chile (most areas) +CL -3327-07040 America/Santiago most of Chile CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -2709-10926 Pacific/Easter Easter Island CM +0403+00942 Africa/Douala @@ -151,10 +150,10 @@ CU +2308-08222 America/Havana CV +1455-02331 Atlantic/Cape_Verde CW +1211-06900 America/Curacao CX -1025+10543 Indian/Christmas -CY +3510+03322 Asia/Nicosia Cyprus (most areas) +CY +3510+03322 Asia/Nicosia most of Cyprus CY +3507+03357 Asia/Famagusta Northern Cyprus CZ +5005+01426 Europe/Prague -DE +5230+01322 Europe/Berlin Germany (most areas) +DE +5230+01322 Europe/Berlin most of Germany DE +4742+00841 Europe/Busingen Busingen DJ +1136+04309 Africa/Djibouti DK +5540+01235 Europe/Copenhagen @@ -187,7 +186,7 @@ GF +0456-05220 America/Cayenne GG +492717-0023210 Europe/Guernsey GH +0533-00013 Africa/Accra GI +3608-00521 Europe/Gibraltar -GL +6411-05144 America/Nuuk Greenland (most areas) +GL +6411-05144 America/Nuuk most of Greenland GL +7646-01840 America/Danmarkshavn National Park (east coast) GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit GL +7634-06847 
America/Thule Thule/Pituffik @@ -235,7 +234,7 @@ KP +3901+12545 Asia/Pyongyang KR +3733+12658 Asia/Seoul KW +2920+04759 Asia/Kuwait KY +1918-08123 America/Cayman -KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) +KZ +4315+07657 Asia/Almaty most of Kazakhstan KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay KZ +5017+05710 Asia/Aqtobe Aqtobe/Aktobe @@ -259,12 +258,12 @@ MD +4700+02850 Europe/Chisinau ME +4226+01916 Europe/Podgorica MF +1804-06305 America/Marigot MG -1855+04731 Indian/Antananarivo -MH +0709+17112 Pacific/Majuro Marshall Islands (most areas) +MH +0709+17112 Pacific/Majuro most of Marshall Islands MH +0905+16720 Pacific/Kwajalein Kwajalein MK +4159+02126 Europe/Skopje ML +1239-00800 Africa/Bamako MM +1647+09610 Asia/Yangon -MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) +MN +4755+10653 Asia/Ulaanbaatar most of Mongolia MN +4801+09139 Asia/Hovd Bayan-Olgiy, Govi-Altai, Hovd, Uvs, Zavkhan MN +4804+11430 Asia/Choibalsan Dornod, Sukhbaatar MO +221150+1133230 Asia/Macau @@ -302,7 +301,7 @@ NO +5955+01045 Europe/Oslo NP +2743+08519 Asia/Kathmandu NR -0031+16655 Pacific/Nauru NU -1901-16955 Pacific/Niue -NZ -3652+17446 Pacific/Auckland New Zealand (most areas) +NZ -3652+17446 Pacific/Auckland most of New Zealand NZ -4357-17633 Pacific/Chatham Chatham Islands OM +2336+05835 Asia/Muscat PA +0858-07932 America/Panama @@ -310,7 +309,7 @@ PE -1203-07703 America/Lima PF -1732-14934 Pacific/Tahiti Society Islands PF -0900-13930 Pacific/Marquesas Marquesas Islands PF -2308-13457 Pacific/Gambier Gambier Islands -PG -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas) +PG -0930+14710 Pacific/Port_Moresby most of Papua New Guinea PG -0613+15534 Pacific/Bougainville Bougainville PH +1435+12100 Asia/Manila PK +2452+06703 Asia/Karachi @@ -356,7 +355,7 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island -RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is +RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); N Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea RW -0157+03004 Africa/Kigali @@ -397,7 +396,7 @@ TT +1039-06131 America/Port_of_Spain TV -0831+17913 Pacific/Funafuti TW +2503+12130 Asia/Taipei TZ -0648+03917 Africa/Dar_es_Salaam -UA +5026+03031 Europe/Kyiv Ukraine (most areas) +UA +5026+03031 Europe/Kyiv most of Ukraine UG +0019+03225 Africa/Kampala UM +2813-17722 Pacific/Midway Midway Islands UM +1917+16637 Pacific/Wake Wake Island @@ -420,7 +419,7 @@ US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +394421-1045903 America/Denver Mountain (most areas) US +433649-1161209 America/Boise Mountain - ID (south); OR (east) -US +332654-1120424 America/Phoenix MST - Arizona (except Navajo) +US +332654-1120424 America/Phoenix MST - AZ (except Navajo) US +340308-1181434 America/Los_Angeles Pacific US +611305-1495401 America/Anchorage Alaska (most areas) US +581807-1342511 America/Juneau Alaska - Juneau area @@ -428,7 +427,7 @@ US +571035-1351807 America/Sitka Alaska - Sitka area US +550737-1313435 America/Metlakatla Alaska - Annette Island US +593249-1394338 America/Yakutat Alaska - Yakutat US +643004-1652423 America/Nome Alaska (west) -US +515248-1763929 America/Adak Aleutian Islands +US 
+515248-1763929 America/Adak Alaska - western Aleutians US +211825-1575130 Pacific/Honolulu Hawaii UY -345433-0561245 America/Montevideo UZ +3940+06648 Asia/Samarkand Uzbekistan (west) diff --git a/lib/pytz/zoneinfo/zone1970.tab b/lib/pytz/zoneinfo/zone1970.tab index a9b36d36..1f1cecb8 100644 --- a/lib/pytz/zoneinfo/zone1970.tab +++ b/lib/pytz/zoneinfo/zone1970.tab @@ -18,7 +18,10 @@ # Please see the theory.html file for how these names are chosen. # If multiple timezones overlap a country, each has a row in the # table, with each column 1 containing the country code. -# 4. Comments; present if and only if a country has multiple timezones. +# 4. Comments; present if and only if countries have multiple timezones, +# and useful only for those countries. For example, the comments +# for the row with countries CH,DE,LI and name Europe/Zurich +# are useful only for DE, since CH and LI have no other timezones. # # If a timezone covers multiple countries, the most-populous city is used, # and that country is listed first in column 1; any other countries @@ -34,7 +37,7 @@ #country- #codes coordinates TZ comments AD +4230+00131 Europe/Andorra -AE,OM,RE,SC,TF +2518+05518 Asia/Dubai UAE, Oman, Réunion, Seychelles, Crozet, Scattered Is +AE,OM,RE,SC,TF +2518+05518 Asia/Dubai Crozet, Scattered Is AF +3431+06912 Asia/Kabul AL +4120+01950 Europe/Tirane AM +4011+04430 Asia/Yerevan @@ -45,7 +48,7 @@ AQ -6448-06406 Antarctica/Palmer Palmer AQ -6734-06808 Antarctica/Rothera Rothera AQ -720041+0023206 Antarctica/Troll Troll AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) -AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF) +AR -3124-06411 America/Argentina/Cordoba most areas: CB, CC, CN, ER, FM, MN, SE, SF AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2649-06513 America/Argentina/Tucuman Tucumán (TM) @@ -56,7 +59,7 @@ AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3319-06621 America/Argentina/San_Luis San Luis (SL) AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) -AS,UM -1416-17042 Pacific/Pago_Pago Samoa, Midway +AS,UM -1416-17042 Pacific/Pago_Pago Midway AT +4813+01620 Europe/Vienna AU -3133+15905 Australia/Lord_Howe Lord Howe Island AU -5430+15857 Antarctica/Macquarie Macquarie Island @@ -101,26 +104,25 @@ CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) -CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas), Bahamas +CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA +6344-06828 America/Iqaluit Eastern - NU (most areas) CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +5024-10439 America/Regina CST - SK (most areas) CA +5017-10750 America/Swift_Current CST - SK (midwest) -CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) +CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) -CA +6227-11421 America/Yellowknife Mountain - NT (central) CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft 
St John) CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson) CA +6043-13503 America/Whitehorse MST - Yukon (east) CA +6404-13925 America/Dawson MST - Yukon (west) CA +4916-12307 America/Vancouver Pacific - BC (most areas) -CH,DE,LI +4723+00832 Europe/Zurich Swiss time +CH,DE,LI +4723+00832 Europe/Zurich Büsingen CI,BF,GH,GM,GN,IS,ML,MR,SH,SL,SN,TG +0519-00402 Africa/Abidjan CK -2114-15946 Pacific/Rarotonga -CL -3327-07040 America/Santiago Chile (most areas) +CL -3327-07040 America/Santiago most of Chile CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -2709-10926 Pacific/Easter Easter Island CN +3114+12128 Asia/Shanghai Beijing Time @@ -129,10 +131,10 @@ CO +0436-07405 America/Bogota CR +0956-08405 America/Costa_Rica CU +2308-08222 America/Havana CV +1455-02331 Atlantic/Cape_Verde -CY +3510+03322 Asia/Nicosia Cyprus (most areas) +CY +3510+03322 Asia/Nicosia most of Cyprus CY +3507+03357 Asia/Famagusta Northern Cyprus CZ,SK +5005+01426 Europe/Prague -DE,DK,NO,SE,SJ +5230+01322 Europe/Berlin Germany (most areas), Scandinavia +DE,DK,NO,SE,SJ +5230+01322 Europe/Berlin most of Germany DO +1828-06954 America/Santo_Domingo DZ +3647+00303 Africa/Algiers EC -0210-07950 America/Guayaquil Ecuador (mainland) @@ -153,7 +155,7 @@ GB,GG,IM,JE +513030-0000731 Europe/London GE +4143+04449 Asia/Tbilisi GF +0456-05220 America/Cayenne GI +3608-00521 Europe/Gibraltar -GL +6411-05144 America/Nuuk Greenland (most areas) +GL +6411-05144 America/Nuuk most of Greenland GL +7646-01840 America/Danmarkshavn National Park (east coast) GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit GL +7634-06847 America/Thule Thule/Pituffik @@ -183,12 +185,12 @@ JO +3157+03556 Asia/Amman JP +353916+1394441 Asia/Tokyo KE,DJ,ER,ET,KM,MG,SO,TZ,UG,YT -0117+03649 Africa/Nairobi KG +4254+07436 Asia/Bishkek -KI,MH,TV,UM,WF +0125+17300 Pacific/Tarawa Gilberts, Marshalls, Tuvalu, Wallis & Futuna, Wake +KI,MH,TV,UM,WF +0125+17300 Pacific/Tarawa Gilberts, Marshalls, Wake KI -0247-17143 Pacific/Kanton Phoenix Islands KI +0152-15720 Pacific/Kiritimati Line Islands KP +3901+12545 Asia/Pyongyang KR +3733+12658 Asia/Seoul -KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) +KZ +4315+07657 Asia/Almaty most of Kazakhstan KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay KZ +5017+05710 Asia/Aqtobe Aqtöbe/Aktobe @@ -205,14 +207,14 @@ MA +3339-00735 Africa/Casablanca MD +4700+02850 Europe/Chisinau MH +0905+16720 Pacific/Kwajalein Kwajalein MM,CC +1647+09610 Asia/Yangon -MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) +MN +4755+10653 Asia/Ulaanbaatar most of Mongolia MN +4801+09139 Asia/Hovd Bayan-Ölgii, Govi-Altai, Hovd, Uvs, Zavkhan MN +4804+11430 Asia/Choibalsan Dornod, Sükhbaatar MO +221150+1133230 Asia/Macau MQ +1436-06105 America/Martinique MT +3554+01431 Europe/Malta MU -2010+05730 Indian/Mauritius -MV,TF +0410+07330 Indian/Maldives Maldives, Kerguelen, St Paul I, Amsterdam I +MV,TF +0410+07330 Indian/Maldives Kerguelen, St Paul I, Amsterdam I MX +1924-09909 America/Mexico_City Central Mexico MX +2105-08646 America/Cancun Quintana Roo MX +2058-08937 America/Merida Campeche, Yucatán @@ -225,7 +227,7 @@ MX +2313-10625 America/Mazatlan Baja California Sur, Nayarit (most areas), Sinal MX +2048-10515 America/Bahia_Banderas Bahía de Banderas MX +2904-11058 America/Hermosillo Sonora MX +3232-11701 America/Tijuana Baja California -MY,BN +0133+11020 Asia/Kuching Sabah, Sarawak, Brunei +MY,BN +0133+11020 Asia/Kuching Sabah, Sarawak MZ,BI,BW,CD,MW,RW,ZM,ZW 
-2558+03235 Africa/Maputo Central Africa Time NA -2234+01706 Africa/Windhoek NC -2216+16627 Pacific/Noumea @@ -237,7 +239,7 @@ NR -0031+16655 Pacific/Nauru NU -1901-16955 Pacific/Niue NZ,AQ -3652+17446 Pacific/Auckland New Zealand time NZ -4357-17633 Pacific/Chatham Chatham Islands -PA,CA,KY +0858-07932 America/Panama EST - Panama, Cayman, ON (Atikokan), NU (Coral H) +PA,CA,KY +0858-07932 America/Panama EST - ON (Atikokan), NU (Coral H) PE -1203-07703 America/Lima PF -1732-14934 Pacific/Tahiti Society Islands PF -0900-13930 Pacific/Marquesas Marquesas Islands @@ -285,13 +287,13 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island -RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is +RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); N Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea -SA,AQ,KW,YE +2438+04643 Asia/Riyadh Arabia, Syowa -SB,FM -0932+16012 Pacific/Guadalcanal Solomons, Pohnpei +SA,AQ,KW,YE +2438+04643 Asia/Riyadh Syowa +SB,FM -0932+16012 Pacific/Guadalcanal Pohnpei SD +1536+03232 Africa/Khartoum -SG,MY +0117+10351 Asia/Singapore Singapore, peninsular Malaysia +SG,MY +0117+10351 Asia/Singapore peninsular Malaysia SR +0550-05510 America/Paramaribo SS +0451+03137 Africa/Juba ST +0020+00644 Africa/Sao_Tome @@ -299,7 +301,7 @@ SV +1342-08912 America/El_Salvador SY +3330+03618 Asia/Damascus TC +2128-07108 America/Grand_Turk TD +1207+01503 Africa/Ndjamena -TH,CX,KH,LA,VN +1345+10031 Asia/Bangkok Indochina (most areas) +TH,CX,KH,LA,VN +1345+10031 Asia/Bangkok north Vietnam TJ +3835+06848 Asia/Dushanbe TK -0922-17114 Pacific/Fakaofo TL -0833+12535 Asia/Dili @@ -308,7 +310,7 @@ TN +3648+01011 Africa/Tunis TO -210800-1751200 Pacific/Tongatapu TR +4101+02858 Europe/Istanbul TW +2503+12130 Asia/Taipei -UA +5026+03031 Europe/Kyiv Ukraine (most areas) +UA +5026+03031 Europe/Kyiv most of Ukraine US +404251-0740023 America/New_York Eastern (most areas) US +421953-0830245 America/Detroit Eastern - MI (most areas) US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area) @@ -328,7 +330,7 @@ US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +394421-1045903 America/Denver Mountain (most areas) US +433649-1161209 America/Boise Mountain - ID (south); OR (east) -US,CA +332654-1120424 America/Phoenix MST - Arizona (except Navajo), Creston BC +US,CA +332654-1120424 America/Phoenix MST - AZ (most areas), Creston BC US +340308-1181434 America/Los_Angeles Pacific US +611305-1495401 America/Anchorage Alaska (most areas) US +581807-1342511 America/Juneau Alaska - Juneau area @@ -336,13 +338,13 @@ US +571035-1351807 America/Sitka Alaska - Sitka area US +550737-1313435 America/Metlakatla Alaska - Annette Island US +593249-1394338 America/Yakutat Alaska - Yakutat US +643004-1652423 America/Nome Alaska (west) -US +515248-1763929 America/Adak Aleutian Islands -US,UM +211825-1575130 Pacific/Honolulu Hawaii +US +515248-1763929 America/Adak Alaska - western Aleutians +US +211825-1575130 Pacific/Honolulu Hawaii UY -345433-0561245 America/Montevideo UZ +3940+06648 Asia/Samarkand Uzbekistan (west) UZ +4120+06918 Asia/Tashkent Uzbekistan (east) VE +1030-06656 America/Caracas -VN +1045+10640 Asia/Ho_Chi_Minh Vietnam (south) +VN +1045+10640 Asia/Ho_Chi_Minh south Vietnam VU 
-1740+16825 Pacific/Efate WS -1350-17144 Pacific/Apia ZA,LS,SZ -2615+02800 Africa/Johannesburg diff --git a/requirements.txt b/requirements.txt index ee495cb7..00e17bde 100644 --- a/requirements.txt +++ b/requirements.txt @@ -35,7 +35,7 @@ PyJWT==2.6.0 pyparsing==3.0.9 python-dateutil==2.8.2 python-twitter==3.5 -pytz==2022.7.1 +pytz==2023.3 requests==2.28.2 requests-oauthlib==1.3.1 rumps==0.4.0; platform_system == "Darwin" From 70fb00280bb40e6d51455078d0ade91be6aeec66 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:38:28 -0700 Subject: [PATCH 040/361] Bump tzdata from 2022.7 to 2023.3 (#2032) * Bump tzdata from 2022.7 to 2023.3 Bumps [tzdata](https://github.com/python/tzdata) from 2022.7 to 2023.3. - [Release notes](https://github.com/python/tzdata/releases) - [Changelog](https://github.com/python/tzdata/blob/master/NEWS.md) - [Commits](https://github.com/python/tzdata/compare/2022.7...2023.3) --- updated-dependencies: - dependency-name: tzdata dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Update tzdata==2023.3 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/tzdata/__init__.py | 4 +- lib/tzdata/zoneinfo/Africa/Cairo | Bin 1276 -> 1309 bytes lib/tzdata/zoneinfo/Africa/Casablanca | Bin 1919 -> 1919 bytes lib/tzdata/zoneinfo/Africa/El_Aaiun | Bin 1830 -> 1830 bytes lib/tzdata/zoneinfo/America/Godthab | Bin 931 -> 965 bytes lib/tzdata/zoneinfo/America/Nuuk | Bin 931 -> 965 bytes lib/tzdata/zoneinfo/America/Yellowknife | Bin 844 -> 970 bytes lib/tzdata/zoneinfo/Asia/Gaza | Bin 1258 -> 2518 bytes lib/tzdata/zoneinfo/Asia/Hebron | Bin 1276 -> 2536 bytes lib/tzdata/zoneinfo/Egypt | Bin 1276 -> 1309 bytes lib/tzdata/zoneinfo/Europe/Kirov | Bin 717 -> 735 bytes lib/tzdata/zoneinfo/Europe/Volgograd | Bin 735 -> 753 bytes lib/tzdata/zoneinfo/iso3166.tab | 2 +- lib/tzdata/zoneinfo/leapseconds | 8 +- lib/tzdata/zoneinfo/tzdata.zi | 132 +++++++++++++++++++----- lib/tzdata/zoneinfo/zone.tab | 29 +++--- lib/tzdata/zoneinfo/zone1970.tab | 58 ++++++----- lib/tzdata/zones | 2 +- requirements.txt | 2 +- 19 files changed, 158 insertions(+), 79 deletions(-) diff --git a/lib/tzdata/__init__.py b/lib/tzdata/__init__.py index 96456857..a7d6b0b5 100644 --- a/lib/tzdata/__init__.py +++ b/lib/tzdata/__init__.py @@ -1,6 +1,6 @@ # IANA versions like 2020a are not valid PEP 440 identifiers; the recommended # way to translate the version is to use YYYY.n where `n` is a 0-based index. -__version__ = "2022.7" +__version__ = "2023.3" # This exposes the original IANA version number. 
-IANA_VERSION = "2022g" +IANA_VERSION = "2023c" diff --git a/lib/tzdata/zoneinfo/Africa/Cairo b/lib/tzdata/zoneinfo/Africa/Cairo index ea38c970086c6ddca48d6f5703cee91ed1ac98b4..1e6d48d1ca4e5416913c41e8814dc045c57d5b58 100644 GIT binary patch delta 58 zcmeyvIhSif7-Pf2a25^*AV~50kT5xsrF3!u3-{z}EUJ>OuE8NXz9xF6dZzjYI=+Sm LK$eNVkqH+7|I-hF delta 23 fcmbQs^@npp7-RjyaF)r5EOm@blkc;rGI9X`W1|Ny diff --git a/lib/tzdata/zoneinfo/Africa/Casablanca b/lib/tzdata/zoneinfo/Africa/Casablanca index 0263c90bd0d82565a86d414b0aa86205e01dad85..240ebb2bfb22642570a6053445a6e71864e7f48a 100644 GIT binary patch delta 88 zcmey*_n&V=3KO%-M1{%eOtsASWEV_+4y4Oh0r||e%z|b>K9F8@7|3U-Wp?og@`3c- gJ3u}wP+bO)&+5xO%~D~qIh!uCz*mLM>1+~=0NOGj4FCWD delta 88 zcmey*_n&V=3KO&M=>?P1nQEDzM<`5w4x}5N0Qtb%7 diff --git a/lib/tzdata/zoneinfo/Africa/El_Aaiun b/lib/tzdata/zoneinfo/Africa/El_Aaiun index 772e23c4cd8bb1942efdc18155585f9b8f0e8429..909c5f9682927c14e3e64e21da75559ad2e598f9 100644 GIT binary patch delta 88 zcmZ3+w~TKC4->P?M1{%xOtsASWEV{C2h!!MfPCg!WQR=>?PdnQEDzM<`702ht5sfPCg!W~o9TA4qTh3*@uZGW&J``9S)) g7?95jRJRz&XZ2;CU%X)QeO6s&Dg6bT`Psx70cmF*H2?qr diff --git a/lib/tzdata/zoneinfo/America/Godthab b/lib/tzdata/zoneinfo/America/Godthab index 79d7a45464b3f1dac470b9ea5f6a12b1344c8173..00b57bb13fbfa802e61f8b93d08622163a35a9a4 100644 GIT binary patch delta 85 zcmZ3?ew3XxBq}q_c%q=#L9bZF3JwrVME&yiRjywPW delta 183 zcmX@beuiyAn1(k40|P4%i~I+IeOeL>Ao_GC&jb*CN%>0wm`;?~_|B54o|%z}nT0wK xXeAK*|G#^y(5S4mEXxtt4a4?5=fBQB@A8G{ zcsVRt{RNgxJHb*H1w$9o9p7P@jV&ze8?ey5%<>D)Tse$9^X(LzRU89nnf2BdAz9+t zb}X`c1{u1L^G9L1?hxzeT3DfPfOCA)EVP#+Xk*>o2481#VWq>CT6w}LAeIt_unaK$EjxYEM_SN`dsF{!qb!quataLr5u%PzX1LJHER8_XbN*d;f}}+Zr4f8s!Ih#Nx} zee>nW``w=*e>lW`zyWR|a+9GIebduP*v#`1s-3wT)PCGh^){}(TF7<%Z!T6BagYB4 D!F(-F delta 25 hcmca6{EBlzC=)})#_$u&lN&h{CeP=HnB2@63;=&u2~Pk3 diff --git a/lib/tzdata/zoneinfo/Asia/Hebron b/lib/tzdata/zoneinfo/Asia/Hebron index 3ce1bac63134c393682a2a91f5d13c7b05ab2db4..fcf923bd283a4a6f21d789551c29ff5e2680108d 100644 GIT binary patch delta 1178 zcmZA0dq`7Z7zXgqwvt6Gtqi$0-o0sXF zm*#5oo|>0rTAG*K2$3iZMWeFPlq^SJHw@eNoc}uiyyy2m&v)3^*&=UdUH;i+HY-{} z$2ekQy`lmm=^&SV{VjCz9f#;3(Ob5|(Ut^;I%bgzUomf{Bv&V8k)vZaxO544;B;Yc3IbrRo=ibf$@p(m}+@708(|S2#UD%)=sM9}kO6rxqN@4E)48 zWGyV&_yv~CIl)pF1w$9oz29M(jV&zeAGFZD%<>D)T0eq3>-{X8T^b8#n|0O}A=%=E z4lHtdhZwq$^T%Pi_BiX;8d#xggmeAUEwq;_XlLEi4&Pz&V5Nf_`pQEQv__O#HS)X* z@yPQg)yVT5Ij}fCO@uRk4eJ)W+>sfbiLceIgFI>zEhKnPD zh4iLaE``;u=V0~h9=OEoH~J+PcQMw5m}qQ$P|!TzfI=zZqfqKS!us=87*7Mf)zfv}Uf=>L`x*+6U z+&c@M_d^T}o%*0Y*s#MMuJY8wRew5ZOls_;aLsr*Tst2P*L9`B^r4?i&S z;l^a^7oh*YCQSeq&EYO6JQTUZEr$;dw$b z|57FL0gqS6pAEAgaFCmX+@xdesvvH-%AaemVz_SqOOuE8NXz9xF6dZzjYI=+Sm LK$eNVkqH+7|I-hF delta 23 fcmbQs^@npp7-RjyaF)r5EOm@blkc;rGI9X`W1|Ny diff --git a/lib/tzdata/zoneinfo/Europe/Kirov b/lib/tzdata/zoneinfo/Europe/Kirov index d1c93c540c2670f5fd6d04cf837f31d9c2b0c530..bfac56111d9cc81a93cce85205c685880433b96f 100644 GIT binary patch delta 134 zcmX@hdY^Sd6e~Le1B1lIcy6XrRu)!f79c?wvavES=*<9XWY98TU}0cz3t;46V6bpt o-~o}00$_0wu!x9{ZwQ07fiV!90=1 23s 0 - @@ -75,6 +75,8 @@ R K 2014 o - May 15 24 1 S R K 2014 o - Jun 26 24 0 - R K 2014 o - Jul 31 24 1 S R K 2014 o - S lastTh 24 0 - +R K 2023 ma - Ap lastF 0 1 S +R K 2023 ma - O lastTh 24 0 - Z Africa/Cairo 2:5:9 - LMT 1900 O 2 K EE%sT Z Africa/Bissau -1:2:20 - LMT 1912 Ja 1 1u @@ -172,7 +174,7 @@ R M 2021 o - May 16 2 0 - R M 2022 o - Mar 27 3 -1 - R M 2022 o - May 8 2 0 - R M 2023 o - Mar 19 3 -1 - -R M 2023 o - Ap 30 2 0 - +R M 2023 o - Ap 23 2 0 - R M 2024 o - Mar 10 3 -1 - R M 2024 o - Ap 14 2 0 - R M 2025 o - F 23 3 -1 - @@ 
-188,7 +190,7 @@ R M 2029 o - F 18 2 0 - R M 2029 o - D 30 3 -1 - R M 2030 o - F 10 2 0 - R M 2030 o - D 22 3 -1 - -R M 2031 o - F 2 2 0 - +R M 2031 o - Ja 26 2 0 - R M 2031 o - D 14 3 -1 - R M 2032 o - Ja 18 2 0 - R M 2032 o - N 28 3 -1 - @@ -204,7 +206,7 @@ R M 2036 o - N 23 2 0 - R M 2037 o - O 4 3 -1 - R M 2037 o - N 15 2 0 - R M 2038 o - S 26 3 -1 - -R M 2038 o - N 7 2 0 - +R M 2038 o - O 31 2 0 - R M 2039 o - S 18 3 -1 - R M 2039 o - O 23 2 0 - R M 2040 o - S 2 3 -1 - @@ -220,7 +222,7 @@ R M 2044 o - Au 28 2 0 - R M 2045 o - Jul 9 3 -1 - R M 2045 o - Au 20 2 0 - R M 2046 o - Jul 1 3 -1 - -R M 2046 o - Au 12 2 0 - +R M 2046 o - Au 5 2 0 - R M 2047 o - Jun 23 3 -1 - R M 2047 o - Jul 28 2 0 - R M 2048 o - Jun 7 3 -1 - @@ -236,7 +238,7 @@ R M 2052 o - Jun 2 2 0 - R M 2053 o - Ap 13 3 -1 - R M 2053 o - May 25 2 0 - R M 2054 o - Ap 5 3 -1 - -R M 2054 o - May 17 2 0 - +R M 2054 o - May 10 2 0 - R M 2055 o - Mar 28 3 -1 - R M 2055 o - May 2 2 0 - R M 2056 o - Mar 12 3 -1 - @@ -252,7 +254,7 @@ R M 2060 o - Mar 7 2 0 - R M 2061 o - Ja 16 3 -1 - R M 2061 o - F 27 2 0 - R M 2062 o - Ja 8 3 -1 - -R M 2062 o - F 19 2 0 - +R M 2062 o - F 12 2 0 - R M 2062 o - D 31 3 -1 - R M 2063 o - F 4 2 0 - R M 2063 o - D 16 3 -1 - @@ -268,7 +270,7 @@ R M 2067 o - D 11 2 0 - R M 2068 o - O 21 3 -1 - R M 2068 o - D 2 2 0 - R M 2069 o - O 13 3 -1 - -R M 2069 o - N 24 2 0 - +R M 2069 o - N 17 2 0 - R M 2070 o - O 5 3 -1 - R M 2070 o - N 9 2 0 - R M 2071 o - S 20 3 -1 - @@ -284,7 +286,7 @@ R M 2075 o - S 15 2 0 - R M 2076 o - Jul 26 3 -1 - R M 2076 o - S 6 2 0 - R M 2077 o - Jul 18 3 -1 - -R M 2077 o - Au 29 2 0 - +R M 2077 o - Au 22 2 0 - R M 2078 o - Jul 10 3 -1 - R M 2078 o - Au 14 2 0 - R M 2079 o - Jun 25 3 -1 - @@ -294,13 +296,13 @@ R M 2080 o - Jul 21 2 0 - R M 2081 o - Jun 1 3 -1 - R M 2081 o - Jul 13 2 0 - R M 2082 o - May 24 3 -1 - -R M 2082 o - Jul 5 2 0 - +R M 2082 o - Jun 28 2 0 - R M 2083 o - May 16 3 -1 - R M 2083 o - Jun 20 2 0 - R M 2084 o - Ap 30 3 -1 - R M 2084 o - Jun 11 2 0 - R M 2085 o - Ap 22 3 -1 - -R M 2085 o - Jun 3 2 0 - +R M 2085 o - May 27 2 0 - R M 2086 o - Ap 14 3 -1 - R M 2086 o - May 19 2 0 - R M 2087 o - Mar 30 3 -1 - @@ -997,8 +999,86 @@ R P 2020 2021 - Mar Sa<=30 0 1 S R P 2020 o - O 24 1 0 - R P 2021 o - O 29 1 0 - R P 2022 o - Mar 27 0 1 S -R P 2022 ma - O Sa<=30 2 0 - -R P 2023 ma - Mar Sa<=30 2 1 S +R P 2022 2035 - O Sa<=30 2 0 - +R P 2023 o - Ap 29 2 1 S +R P 2024 o - Ap 13 2 1 S +R P 2025 o - Ap 5 2 1 S +R P 2026 2054 - Mar Sa<=30 2 1 S +R P 2036 o - O 18 2 0 - +R P 2037 o - O 10 2 0 - +R P 2038 o - S 25 2 0 - +R P 2039 o - S 17 2 0 - +R P 2039 o - O 22 2 1 S +R P 2039 2067 - O Sa<=30 2 0 - +R P 2040 o - S 1 2 0 - +R P 2040 o - O 13 2 1 S +R P 2041 o - Au 24 2 0 - +R P 2041 o - S 28 2 1 S +R P 2042 o - Au 16 2 0 - +R P 2042 o - S 20 2 1 S +R P 2043 o - Au 1 2 0 - +R P 2043 o - S 12 2 1 S +R P 2044 o - Jul 23 2 0 - +R P 2044 o - Au 27 2 1 S +R P 2045 o - Jul 15 2 0 - +R P 2045 o - Au 19 2 1 S +R P 2046 o - Jun 30 2 0 - +R P 2046 o - Au 11 2 1 S +R P 2047 o - Jun 22 2 0 - +R P 2047 o - Jul 27 2 1 S +R P 2048 o - Jun 6 2 0 - +R P 2048 o - Jul 18 2 1 S +R P 2049 o - May 29 2 0 - +R P 2049 o - Jul 3 2 1 S +R P 2050 o - May 21 2 0 - +R P 2050 o - Jun 25 2 1 S +R P 2051 o - May 6 2 0 - +R P 2051 o - Jun 17 2 1 S +R P 2052 o - Ap 27 2 0 - +R P 2052 o - Jun 1 2 1 S +R P 2053 o - Ap 12 2 0 - +R P 2053 o - May 24 2 1 S +R P 2054 o - Ap 4 2 0 - +R P 2054 o - May 16 2 1 S +R P 2055 o - May 1 2 1 S +R P 2056 o - Ap 22 2 1 S +R P 2057 o - Ap 7 2 1 S +R P 2058 ma - Mar Sa<=30 2 1 S +R P 
2068 o - O 20 2 0 - +R P 2069 o - O 12 2 0 - +R P 2070 o - O 4 2 0 - +R P 2071 o - S 19 2 0 - +R P 2072 o - S 10 2 0 - +R P 2072 o - O 15 2 1 S +R P 2073 o - S 2 2 0 - +R P 2073 o - O 7 2 1 S +R P 2074 o - Au 18 2 0 - +R P 2074 o - S 29 2 1 S +R P 2075 o - Au 10 2 0 - +R P 2075 o - S 14 2 1 S +R P 2075 ma - O Sa<=30 2 0 - +R P 2076 o - Jul 25 2 0 - +R P 2076 o - S 5 2 1 S +R P 2077 o - Jul 17 2 0 - +R P 2077 o - Au 28 2 1 S +R P 2078 o - Jul 9 2 0 - +R P 2078 o - Au 13 2 1 S +R P 2079 o - Jun 24 2 0 - +R P 2079 o - Au 5 2 1 S +R P 2080 o - Jun 15 2 0 - +R P 2080 o - Jul 20 2 1 S +R P 2081 o - Jun 7 2 0 - +R P 2081 o - Jul 12 2 1 S +R P 2082 o - May 23 2 0 - +R P 2082 o - Jul 4 2 1 S +R P 2083 o - May 15 2 0 - +R P 2083 o - Jun 19 2 1 S +R P 2084 o - Ap 29 2 0 - +R P 2084 o - Jun 10 2 1 S +R P 2085 o - Ap 21 2 0 - +R P 2085 o - Jun 2 2 1 S +R P 2086 o - Ap 13 2 0 - +R P 2086 o - May 18 2 1 S Z Asia/Gaza 2:17:52 - LMT 1900 O 2 Z EET/EEST 1948 May 15 2 K EE%sT 1967 Jun 5 @@ -1754,8 +1834,8 @@ Z America/Scoresbysund -1:27:52 - LMT 1916 Jul 28 -1 E -01/+00 Z America/Nuuk -3:26:56 - LMT 1916 Jul 28 -3 - -03 1980 Ap 6 2 --3 E -03/-02 2023 Mar 25 22 --2 - -02 +-3 E -03/-02 2023 O 29 1u +-2 E -02/-01 Z America/Thule -4:35:8 - LMT 1916 Jul 28 -4 Th A%sT Z Europe/Tallinn 1:39 - LMT 1880 @@ -2175,13 +2255,13 @@ Z Europe/Volgograd 2:57:40 - LMT 1920 Ja 3 3 - +03 1930 Jun 21 4 - +04 1961 N 11 4 R +04/+05 1988 Mar 27 2s -3 R +03/+04 1991 Mar 31 2s +3 R MSK/MSD 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s -3 R +03/+04 2011 Mar 27 2s -4 - +04 2014 O 26 2s -3 - +03 2018 O 28 2s +3 R MSK/MSD 2011 Mar 27 2s +4 - MSK 2014 O 26 2s +3 - MSK 2018 O 28 2s 4 - +04 2020 D 27 2s -3 - +03 +3 - MSK Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1988 Mar 27 2s @@ -2194,11 +2274,11 @@ Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u Z Europe/Kirov 3:18:48 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1989 Mar 26 2s -3 R +03/+04 1991 Mar 31 2s +3 R MSK/MSD 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s -3 R +03/+04 2011 Mar 27 2s -4 - +04 2014 O 26 2s -3 - +03 +3 R MSK/MSD 2011 Mar 27 2s +4 - MSK 2014 O 26 2s +3 - MSK Z Europe/Samara 3:20:20 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 - +04 1935 Ja 27 @@ -3070,9 +3150,6 @@ Z America/Cambridge_Bay 0 - -00 1920 -5 - EST 2000 N 5 -6 - CST 2001 Ap 1 3 -7 C M%sT -Z America/Yellowknife 0 - -00 1935 --7 Y M%sT 1980 --7 C M%sT Z America/Inuvik 0 - -00 1953 -8 Y P%sT 1979 Ap lastSu 2 -7 Y M%sT 1980 @@ -4171,6 +4248,7 @@ L America/Argentina/Cordoba America/Rosario L America/Tijuana America/Santa_Isabel L America/Denver America/Shiprock L America/Toronto America/Thunder_Bay +L America/Edmonton America/Yellowknife L Pacific/Auckland Antarctica/South_Pole L Asia/Shanghai Asia/Chongqing L Asia/Shanghai Asia/Harbin diff --git a/lib/tzdata/zoneinfo/zone.tab b/lib/tzdata/zoneinfo/zone.tab index 6e5adb9f..dbcb6179 100644 --- a/lib/tzdata/zoneinfo/zone.tab +++ b/lib/tzdata/zoneinfo/zone.tab @@ -121,9 +121,8 @@ CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +5024-10439 America/Regina CST - SK (most areas) CA +5017-10750 America/Swift_Current CST - SK (midwest) -CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) +CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) -CA +6227-11421 America/Yellowknife Mountain - NT (central) CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +4906-11631 America/Creston MST - 
BC (Creston) CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) @@ -139,7 +138,7 @@ CG -0416+01517 Africa/Brazzaville CH +4723+00832 Europe/Zurich CI +0519-00402 Africa/Abidjan CK -2114-15946 Pacific/Rarotonga -CL -3327-07040 America/Santiago Chile (most areas) +CL -3327-07040 America/Santiago most of Chile CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -2709-10926 Pacific/Easter Easter Island CM +0403+00942 Africa/Douala @@ -151,10 +150,10 @@ CU +2308-08222 America/Havana CV +1455-02331 Atlantic/Cape_Verde CW +1211-06900 America/Curacao CX -1025+10543 Indian/Christmas -CY +3510+03322 Asia/Nicosia Cyprus (most areas) +CY +3510+03322 Asia/Nicosia most of Cyprus CY +3507+03357 Asia/Famagusta Northern Cyprus CZ +5005+01426 Europe/Prague -DE +5230+01322 Europe/Berlin Germany (most areas) +DE +5230+01322 Europe/Berlin most of Germany DE +4742+00841 Europe/Busingen Busingen DJ +1136+04309 Africa/Djibouti DK +5540+01235 Europe/Copenhagen @@ -187,7 +186,7 @@ GF +0456-05220 America/Cayenne GG +492717-0023210 Europe/Guernsey GH +0533-00013 Africa/Accra GI +3608-00521 Europe/Gibraltar -GL +6411-05144 America/Nuuk Greenland (most areas) +GL +6411-05144 America/Nuuk most of Greenland GL +7646-01840 America/Danmarkshavn National Park (east coast) GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit GL +7634-06847 America/Thule Thule/Pituffik @@ -235,7 +234,7 @@ KP +3901+12545 Asia/Pyongyang KR +3733+12658 Asia/Seoul KW +2920+04759 Asia/Kuwait KY +1918-08123 America/Cayman -KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) +KZ +4315+07657 Asia/Almaty most of Kazakhstan KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay KZ +5017+05710 Asia/Aqtobe Aqtobe/Aktobe @@ -259,12 +258,12 @@ MD +4700+02850 Europe/Chisinau ME +4226+01916 Europe/Podgorica MF +1804-06305 America/Marigot MG -1855+04731 Indian/Antananarivo -MH +0709+17112 Pacific/Majuro Marshall Islands (most areas) +MH +0709+17112 Pacific/Majuro most of Marshall Islands MH +0905+16720 Pacific/Kwajalein Kwajalein MK +4159+02126 Europe/Skopje ML +1239-00800 Africa/Bamako MM +1647+09610 Asia/Yangon -MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) +MN +4755+10653 Asia/Ulaanbaatar most of Mongolia MN +4801+09139 Asia/Hovd Bayan-Olgiy, Govi-Altai, Hovd, Uvs, Zavkhan MN +4804+11430 Asia/Choibalsan Dornod, Sukhbaatar MO +221150+1133230 Asia/Macau @@ -302,7 +301,7 @@ NO +5955+01045 Europe/Oslo NP +2743+08519 Asia/Kathmandu NR -0031+16655 Pacific/Nauru NU -1901-16955 Pacific/Niue -NZ -3652+17446 Pacific/Auckland New Zealand (most areas) +NZ -3652+17446 Pacific/Auckland most of New Zealand NZ -4357-17633 Pacific/Chatham Chatham Islands OM +2336+05835 Asia/Muscat PA +0858-07932 America/Panama @@ -310,7 +309,7 @@ PE -1203-07703 America/Lima PF -1732-14934 Pacific/Tahiti Society Islands PF -0900-13930 Pacific/Marquesas Marquesas Islands PF -2308-13457 Pacific/Gambier Gambier Islands -PG -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas) +PG -0930+14710 Pacific/Port_Moresby most of Papua New Guinea PG -0613+15534 Pacific/Bougainville Bougainville PH +1435+12100 Asia/Manila PK +2452+06703 Asia/Karachi @@ -356,7 +355,7 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island -RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is +RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha 
(E); N Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea RW -0157+03004 Africa/Kigali @@ -397,7 +396,7 @@ TT +1039-06131 America/Port_of_Spain TV -0831+17913 Pacific/Funafuti TW +2503+12130 Asia/Taipei TZ -0648+03917 Africa/Dar_es_Salaam -UA +5026+03031 Europe/Kyiv Ukraine (most areas) +UA +5026+03031 Europe/Kyiv most of Ukraine UG +0019+03225 Africa/Kampala UM +2813-17722 Pacific/Midway Midway Islands UM +1917+16637 Pacific/Wake Wake Island @@ -420,7 +419,7 @@ US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +394421-1045903 America/Denver Mountain (most areas) US +433649-1161209 America/Boise Mountain - ID (south); OR (east) -US +332654-1120424 America/Phoenix MST - Arizona (except Navajo) +US +332654-1120424 America/Phoenix MST - AZ (except Navajo) US +340308-1181434 America/Los_Angeles Pacific US +611305-1495401 America/Anchorage Alaska (most areas) US +581807-1342511 America/Juneau Alaska - Juneau area @@ -428,7 +427,7 @@ US +571035-1351807 America/Sitka Alaska - Sitka area US +550737-1313435 America/Metlakatla Alaska - Annette Island US +593249-1394338 America/Yakutat Alaska - Yakutat US +643004-1652423 America/Nome Alaska (west) -US +515248-1763929 America/Adak Aleutian Islands +US +515248-1763929 America/Adak Alaska - western Aleutians US +211825-1575130 Pacific/Honolulu Hawaii UY -345433-0561245 America/Montevideo UZ +3940+06648 Asia/Samarkand Uzbekistan (west) diff --git a/lib/tzdata/zoneinfo/zone1970.tab b/lib/tzdata/zoneinfo/zone1970.tab index a9b36d36..1f1cecb8 100644 --- a/lib/tzdata/zoneinfo/zone1970.tab +++ b/lib/tzdata/zoneinfo/zone1970.tab @@ -18,7 +18,10 @@ # Please see the theory.html file for how these names are chosen. # If multiple timezones overlap a country, each has a row in the # table, with each column 1 containing the country code. -# 4. Comments; present if and only if a country has multiple timezones. +# 4. Comments; present if and only if countries have multiple timezones, +# and useful only for those countries. For example, the comments +# for the row with countries CH,DE,LI and name Europe/Zurich +# are useful only for DE, since CH and LI have no other timezones. 
# # If a timezone covers multiple countries, the most-populous city is used, # and that country is listed first in column 1; any other countries @@ -34,7 +37,7 @@ #country- #codes coordinates TZ comments AD +4230+00131 Europe/Andorra -AE,OM,RE,SC,TF +2518+05518 Asia/Dubai UAE, Oman, Réunion, Seychelles, Crozet, Scattered Is +AE,OM,RE,SC,TF +2518+05518 Asia/Dubai Crozet, Scattered Is AF +3431+06912 Asia/Kabul AL +4120+01950 Europe/Tirane AM +4011+04430 Asia/Yerevan @@ -45,7 +48,7 @@ AQ -6448-06406 Antarctica/Palmer Palmer AQ -6734-06808 Antarctica/Rothera Rothera AQ -720041+0023206 Antarctica/Troll Troll AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) -AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF) +AR -3124-06411 America/Argentina/Cordoba most areas: CB, CC, CN, ER, FM, MN, SE, SF AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2649-06513 America/Argentina/Tucuman Tucumán (TM) @@ -56,7 +59,7 @@ AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3319-06621 America/Argentina/San_Luis San Luis (SL) AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) -AS,UM -1416-17042 Pacific/Pago_Pago Samoa, Midway +AS,UM -1416-17042 Pacific/Pago_Pago Midway AT +4813+01620 Europe/Vienna AU -3133+15905 Australia/Lord_Howe Lord Howe Island AU -5430+15857 Antarctica/Macquarie Macquarie Island @@ -101,26 +104,25 @@ CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) -CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas), Bahamas +CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA +6344-06828 America/Iqaluit Eastern - NU (most areas) CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +5024-10439 America/Regina CST - SK (most areas) CA +5017-10750 America/Swift_Current CST - SK (midwest) -CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) +CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) -CA +6227-11421 America/Yellowknife Mountain - NT (central) CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson) CA +6043-13503 America/Whitehorse MST - Yukon (east) CA +6404-13925 America/Dawson MST - Yukon (west) CA +4916-12307 America/Vancouver Pacific - BC (most areas) -CH,DE,LI +4723+00832 Europe/Zurich Swiss time +CH,DE,LI +4723+00832 Europe/Zurich Büsingen CI,BF,GH,GM,GN,IS,ML,MR,SH,SL,SN,TG +0519-00402 Africa/Abidjan CK -2114-15946 Pacific/Rarotonga -CL -3327-07040 America/Santiago Chile (most areas) +CL -3327-07040 America/Santiago most of Chile CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -2709-10926 Pacific/Easter Easter Island CN +3114+12128 Asia/Shanghai Beijing Time @@ -129,10 +131,10 @@ CO +0436-07405 America/Bogota CR +0956-08405 America/Costa_Rica CU +2308-08222 America/Havana CV +1455-02331 Atlantic/Cape_Verde -CY +3510+03322 Asia/Nicosia Cyprus (most areas) +CY +3510+03322 Asia/Nicosia most of Cyprus CY +3507+03357 
Asia/Famagusta Northern Cyprus CZ,SK +5005+01426 Europe/Prague -DE,DK,NO,SE,SJ +5230+01322 Europe/Berlin Germany (most areas), Scandinavia +DE,DK,NO,SE,SJ +5230+01322 Europe/Berlin most of Germany DO +1828-06954 America/Santo_Domingo DZ +3647+00303 Africa/Algiers EC -0210-07950 America/Guayaquil Ecuador (mainland) @@ -153,7 +155,7 @@ GB,GG,IM,JE +513030-0000731 Europe/London GE +4143+04449 Asia/Tbilisi GF +0456-05220 America/Cayenne GI +3608-00521 Europe/Gibraltar -GL +6411-05144 America/Nuuk Greenland (most areas) +GL +6411-05144 America/Nuuk most of Greenland GL +7646-01840 America/Danmarkshavn National Park (east coast) GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit GL +7634-06847 America/Thule Thule/Pituffik @@ -183,12 +185,12 @@ JO +3157+03556 Asia/Amman JP +353916+1394441 Asia/Tokyo KE,DJ,ER,ET,KM,MG,SO,TZ,UG,YT -0117+03649 Africa/Nairobi KG +4254+07436 Asia/Bishkek -KI,MH,TV,UM,WF +0125+17300 Pacific/Tarawa Gilberts, Marshalls, Tuvalu, Wallis & Futuna, Wake +KI,MH,TV,UM,WF +0125+17300 Pacific/Tarawa Gilberts, Marshalls, Wake KI -0247-17143 Pacific/Kanton Phoenix Islands KI +0152-15720 Pacific/Kiritimati Line Islands KP +3901+12545 Asia/Pyongyang KR +3733+12658 Asia/Seoul -KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) +KZ +4315+07657 Asia/Almaty most of Kazakhstan KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay KZ +5017+05710 Asia/Aqtobe Aqtöbe/Aktobe @@ -205,14 +207,14 @@ MA +3339-00735 Africa/Casablanca MD +4700+02850 Europe/Chisinau MH +0905+16720 Pacific/Kwajalein Kwajalein MM,CC +1647+09610 Asia/Yangon -MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) +MN +4755+10653 Asia/Ulaanbaatar most of Mongolia MN +4801+09139 Asia/Hovd Bayan-Ölgii, Govi-Altai, Hovd, Uvs, Zavkhan MN +4804+11430 Asia/Choibalsan Dornod, Sükhbaatar MO +221150+1133230 Asia/Macau MQ +1436-06105 America/Martinique MT +3554+01431 Europe/Malta MU -2010+05730 Indian/Mauritius -MV,TF +0410+07330 Indian/Maldives Maldives, Kerguelen, St Paul I, Amsterdam I +MV,TF +0410+07330 Indian/Maldives Kerguelen, St Paul I, Amsterdam I MX +1924-09909 America/Mexico_City Central Mexico MX +2105-08646 America/Cancun Quintana Roo MX +2058-08937 America/Merida Campeche, Yucatán @@ -225,7 +227,7 @@ MX +2313-10625 America/Mazatlan Baja California Sur, Nayarit (most areas), Sinal MX +2048-10515 America/Bahia_Banderas Bahía de Banderas MX +2904-11058 America/Hermosillo Sonora MX +3232-11701 America/Tijuana Baja California -MY,BN +0133+11020 Asia/Kuching Sabah, Sarawak, Brunei +MY,BN +0133+11020 Asia/Kuching Sabah, Sarawak MZ,BI,BW,CD,MW,RW,ZM,ZW -2558+03235 Africa/Maputo Central Africa Time NA -2234+01706 Africa/Windhoek NC -2216+16627 Pacific/Noumea @@ -237,7 +239,7 @@ NR -0031+16655 Pacific/Nauru NU -1901-16955 Pacific/Niue NZ,AQ -3652+17446 Pacific/Auckland New Zealand time NZ -4357-17633 Pacific/Chatham Chatham Islands -PA,CA,KY +0858-07932 America/Panama EST - Panama, Cayman, ON (Atikokan), NU (Coral H) +PA,CA,KY +0858-07932 America/Panama EST - ON (Atikokan), NU (Coral H) PE -1203-07703 America/Lima PF -1732-14934 Pacific/Tahiti Society Islands PF -0900-13930 Pacific/Marquesas Marquesas Islands @@ -285,13 +287,13 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island -RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is +RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha 
(E); N Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea -SA,AQ,KW,YE +2438+04643 Asia/Riyadh Arabia, Syowa -SB,FM -0932+16012 Pacific/Guadalcanal Solomons, Pohnpei +SA,AQ,KW,YE +2438+04643 Asia/Riyadh Syowa +SB,FM -0932+16012 Pacific/Guadalcanal Pohnpei SD +1536+03232 Africa/Khartoum -SG,MY +0117+10351 Asia/Singapore Singapore, peninsular Malaysia +SG,MY +0117+10351 Asia/Singapore peninsular Malaysia SR +0550-05510 America/Paramaribo SS +0451+03137 Africa/Juba ST +0020+00644 Africa/Sao_Tome @@ -299,7 +301,7 @@ SV +1342-08912 America/El_Salvador SY +3330+03618 Asia/Damascus TC +2128-07108 America/Grand_Turk TD +1207+01503 Africa/Ndjamena -TH,CX,KH,LA,VN +1345+10031 Asia/Bangkok Indochina (most areas) +TH,CX,KH,LA,VN +1345+10031 Asia/Bangkok north Vietnam TJ +3835+06848 Asia/Dushanbe TK -0922-17114 Pacific/Fakaofo TL -0833+12535 Asia/Dili @@ -308,7 +310,7 @@ TN +3648+01011 Africa/Tunis TO -210800-1751200 Pacific/Tongatapu TR +4101+02858 Europe/Istanbul TW +2503+12130 Asia/Taipei -UA +5026+03031 Europe/Kyiv Ukraine (most areas) +UA +5026+03031 Europe/Kyiv most of Ukraine US +404251-0740023 America/New_York Eastern (most areas) US +421953-0830245 America/Detroit Eastern - MI (most areas) US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area) @@ -328,7 +330,7 @@ US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +394421-1045903 America/Denver Mountain (most areas) US +433649-1161209 America/Boise Mountain - ID (south); OR (east) -US,CA +332654-1120424 America/Phoenix MST - Arizona (except Navajo), Creston BC +US,CA +332654-1120424 America/Phoenix MST - AZ (most areas), Creston BC US +340308-1181434 America/Los_Angeles Pacific US +611305-1495401 America/Anchorage Alaska (most areas) US +581807-1342511 America/Juneau Alaska - Juneau area @@ -336,13 +338,13 @@ US +571035-1351807 America/Sitka Alaska - Sitka area US +550737-1313435 America/Metlakatla Alaska - Annette Island US +593249-1394338 America/Yakutat Alaska - Yakutat US +643004-1652423 America/Nome Alaska (west) -US +515248-1763929 America/Adak Aleutian Islands -US,UM +211825-1575130 Pacific/Honolulu Hawaii +US +515248-1763929 America/Adak Alaska - western Aleutians +US +211825-1575130 Pacific/Honolulu Hawaii UY -345433-0561245 America/Montevideo UZ +3940+06648 Asia/Samarkand Uzbekistan (west) UZ +4120+06918 Asia/Tashkent Uzbekistan (east) VE +1030-06656 America/Caracas -VN +1045+10640 Asia/Ho_Chi_Minh Vietnam (south) +VN +1045+10640 Asia/Ho_Chi_Minh south Vietnam VU -1740+16825 Pacific/Efate WS -1350-17144 Pacific/Apia ZA,LS,SZ -2615+02800 Africa/Johannesburg diff --git a/lib/tzdata/zones b/lib/tzdata/zones index 8d9892ed..9300ebb0 100644 --- a/lib/tzdata/zones +++ b/lib/tzdata/zones @@ -243,7 +243,6 @@ America/Iqaluit America/Resolute America/Rankin_Inlet America/Cambridge_Bay -America/Yellowknife America/Inuvik America/Whitehorse America/Dawson @@ -561,6 +560,7 @@ America/Rosario America/Santa_Isabel America/Shiprock America/Thunder_Bay +America/Yellowknife Antarctica/South_Pole Asia/Chongqing Asia/Harbin diff --git a/requirements.txt b/requirements.txt index 00e17bde..55053413 100644 --- a/requirements.txt +++ b/requirements.txt @@ -44,7 +44,7 @@ six==1.16.0 soupsieve==2.4 tempora==5.2.1 tokenize-rt==5.0.0 -tzdata==2022.7 +tzdata==2023.3 tzlocal==4.2 urllib3==1.26.15 webencodings==0.5.1 From 1798594569686658df7a1418af032332efb30be6 Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:38:39 -0700 Subject: [PATCH 041/361] Bump simplejson from 3.18.3 to 3.19.1 (#2036) * Bump simplejson from 3.18.3 to 3.19.1 Bumps [simplejson](https://github.com/simplejson/simplejson) from 3.18.3 to 3.19.1. - [Release notes](https://github.com/simplejson/simplejson/releases) - [Changelog](https://github.com/simplejson/simplejson/blob/master/CHANGES.txt) - [Commits](https://github.com/simplejson/simplejson/compare/v3.18.3...v3.19.1) --- updated-dependencies: - dependency-name: simplejson dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update simplejson==3.19.1 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/simplejson/__init__.py | 106 ++++++++++-------------- lib/simplejson/decoder.py | 82 ++++++++++-------- lib/simplejson/encoder.py | 30 +++---- lib/simplejson/scanner.py | 6 +- lib/simplejson/tests/test_decode.py | 8 ++ lib/simplejson/tests/test_fail.py | 4 +- lib/simplejson/tests/test_float.py | 7 +- lib/simplejson/tests/test_scanstring.py | 4 +- requirements.txt | 2 +- 9 files changed, 128 insertions(+), 121 deletions(-) diff --git a/lib/simplejson/__init__.py b/lib/simplejson/__init__.py index 993d64d3..7e533a24 100644 --- a/lib/simplejson/__init__.py +++ b/lib/simplejson/__init__.py @@ -118,7 +118,7 @@ Serializing multiple objects to JSON lines (newline-delimited JSON):: """ from __future__ import absolute_import -__version__ = '3.18.3' +__version__ = '3.19.1' __all__ = [ 'dump', 'dumps', 'load', 'loads', 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', @@ -149,28 +149,10 @@ def _import_c_make_encoder(): except ImportError: return None -_default_encoder = JSONEncoder( - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - encoding='utf-8', - default=None, - use_decimal=True, - namedtuple_as_object=True, - tuple_as_array=True, - iterable_as_array=False, - bigint_as_string=False, - item_sort_key=None, - for_json=False, - ignore_nan=False, - int_as_string_bitcount=None, -) +_default_encoder = JSONEncoder() def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, - allow_nan=True, cls=None, indent=None, separators=None, + allow_nan=False, cls=None, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, sort_keys=False, item_sort_key=None, @@ -187,10 +169,10 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, contain non-ASCII characters, so long as they do not need to be escaped by JSON. When it is true, all non-ASCII characters are escaped. - If *allow_nan* is false, then it will be a ``ValueError`` to - serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) - in strict compliance of the original JSON specification, instead of using - the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). See + If *allow_nan* is true (default: ``False``), then out of range ``float`` + values (``nan``, ``inf``, ``-inf``) will be serialized to + their JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``) + instead of raising a ValueError. See *ignore_nan* for ECMA-262 compliant behavior. 
If *indent* is a string, then JSON array elements and object members @@ -258,7 +240,7 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, """ # cached encoder if (not skipkeys and ensure_ascii and - check_circular and allow_nan and + check_circular and not allow_nan and cls is None and indent is None and separators is None and encoding == 'utf-8' and default is None and use_decimal and namedtuple_as_object and tuple_as_array and not iterable_as_array @@ -292,7 +274,7 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, - allow_nan=True, cls=None, indent=None, separators=None, + allow_nan=False, cls=None, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, sort_keys=False, item_sort_key=None, @@ -312,10 +294,11 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, for container types will be skipped and a circular reference will result in an ``OverflowError`` (or worse). - If ``allow_nan`` is false, then it will be a ``ValueError`` to - serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in - strict compliance of the JSON specification, instead of using the - JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + If *allow_nan* is true (default: ``False``), then out of range ``float`` + values (``nan``, ``inf``, ``-inf``) will be serialized to + their JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``) + instead of raising a ValueError. See + *ignore_nan* for ECMA-262 compliant behavior. If ``indent`` is a string, then JSON array elements and object members will be pretty-printed with a newline followed by that string repeated @@ -383,7 +366,7 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, """ # cached encoder if (not skipkeys and ensure_ascii and - check_circular and allow_nan and + check_circular and not allow_nan and cls is None and indent is None and separators is None and encoding == 'utf-8' and default is None and use_decimal and namedtuple_as_object and tuple_as_array and not iterable_as_array @@ -412,14 +395,12 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, **kw).encode(obj) -_default_decoder = JSONDecoder(encoding=None, object_hook=None, - object_pairs_hook=None) +_default_decoder = JSONDecoder() def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, - use_decimal=False, namedtuple_as_object=True, tuple_as_array=True, - **kw): + use_decimal=False, allow_nan=False, **kw): """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a JSON document as `str` or `bytes`) to a Python object. @@ -442,23 +423,27 @@ def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, takes priority. *parse_float*, if specified, will be called with the string of every - JSON float to be decoded. By default, this is equivalent to + JSON float to be decoded. By default, this is equivalent to ``float(num_str)``. This can be used to use another datatype or parser for JSON floats (e.g. :class:`decimal.Decimal`). *parse_int*, if specified, will be called with the string of every - JSON int to be decoded. By default, this is equivalent to + JSON int to be decoded. By default, this is equivalent to ``int(num_str)``. 
This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. + *allow_nan*, if True (default false), will allow the parser to + accept the non-standard floats ``NaN``, ``Infinity``, and ``-Infinity`` + and enable the use of the deprecated *parse_constant*. If *use_decimal* is true (default: ``False``) then it implies parse_float=decimal.Decimal for parity with ``dump``. + *parse_constant*, if specified, will be + called with one of the following strings: ``'-Infinity'``, + ``'Infinity'``, ``'NaN'``. It is not recommended to use this feature, + as it is rare to parse non-compliant JSON containing these values. + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead of subclassing whenever possible. @@ -468,12 +453,12 @@ def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, encoding=encoding, cls=cls, object_hook=object_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, - use_decimal=use_decimal, **kw) + use_decimal=use_decimal, allow_nan=allow_nan, **kw) def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, - use_decimal=False, **kw): + use_decimal=False, allow_nan=False, **kw): """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON document) to a Python object. @@ -505,14 +490,18 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, ``int(num_str)``. This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. + *allow_nan*, if True (default false), will allow the parser to + accept the non-standard floats ``NaN``, ``Infinity``, and ``-Infinity`` + and enable the use of the deprecated *parse_constant*. If *use_decimal* is true (default: ``False``) then it implies parse_float=decimal.Decimal for parity with ``dump``. + *parse_constant*, if specified, will be + called with one of the following strings: ``'-Infinity'``, + ``'Infinity'``, ``'NaN'``. It is not recommended to use this feature, + as it is rare to parse non-compliant JSON containing these values. + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead of subclassing whenever possible. 
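For reference, a minimal sketch of the new allow_nan default described in the docstrings above, assuming the vendored simplejson 3.19.1 from this patch: out-of-range floats are now rejected on encode unless explicitly allowed, and ignore_nan remains the ECMA-262-compliant alternative.

    import simplejson as json

    json.dumps({"x": 1.5})                     # still fine: '{"x": 1.5}'
    try:
        json.dumps(float("nan"))               # allow_nan now defaults to False
    except ValueError:
        pass
    json.dumps(float("inf"), allow_nan=True)   # opt back in: 'Infinity'
    json.dumps(float("nan"), ignore_nan=True)  # ECMA-262 behavior: 'null'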
@@ -521,7 +510,7 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, if (cls is None and encoding is None and object_hook is None and parse_int is None and parse_float is None and parse_constant is None and object_pairs_hook is None - and not use_decimal and not kw): + and not use_decimal and not allow_nan and not kw): return _default_decoder.decode(s) if cls is None: cls = JSONDecoder @@ -539,6 +528,8 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, if parse_float is not None: raise TypeError("use_decimal=True implies parse_float=Decimal") kw['parse_float'] = Decimal + if allow_nan: + kw['allow_nan'] = True return cls(encoding=encoding, **kw).decode(s) @@ -560,22 +551,9 @@ def _toggle_speedups(enabled): scan.make_scanner = scan.py_make_scanner dec.make_scanner = scan.make_scanner global _default_decoder - _default_decoder = JSONDecoder( - encoding=None, - object_hook=None, - object_pairs_hook=None, - ) + _default_decoder = JSONDecoder() global _default_encoder - _default_encoder = JSONEncoder( - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - encoding='utf-8', - default=None, - ) + _default_encoder = JSONEncoder() def simple_first(kv): """Helper function to pass to item_sort_key to sort simple diff --git a/lib/simplejson/decoder.py b/lib/simplejson/decoder.py index 1a8f772f..c99a976d 100644 --- a/lib/simplejson/decoder.py +++ b/lib/simplejson/decoder.py @@ -46,9 +46,35 @@ BACKSLASH = { DEFAULT_ENCODING = "utf-8" +if hasattr(sys, 'get_int_max_str_digits'): + bounded_int = int +else: + def bounded_int(s, INT_MAX_STR_DIGITS=4300): + """Backport of the integer string length conversion limitation + + https://docs.python.org/3/library/stdtypes.html#int-max-str-digits + """ + if len(s) > INT_MAX_STR_DIGITS: + raise ValueError("Exceeds the limit (%s) for integer string conversion: value has %s digits" % (INT_MAX_STR_DIGITS, len(s))) + return int(s) + + +def scan_four_digit_hex(s, end, _m=re.compile(r'^[0-9a-fA-F]{4}$').match): + """Scan a four digit hex number from s[end:end + 4] + """ + msg = "Invalid \\uXXXX escape sequence" + esc = s[end:end + 4] + if not _m(esc): + raise JSONDecodeError(msg, s, end - 2) + try: + return int(esc, 16), end + 4 + except ValueError: + raise JSONDecodeError(msg, s, end - 2) + def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match, _join=u''.join, - _PY3=PY3, _maxunicode=sys.maxunicode): + _PY3=PY3, _maxunicode=sys.maxunicode, + _scan_four_digit_hex=scan_four_digit_hex): """Scan the string s for a JSON string. End is the index of the character in s after the quote that started the JSON string. 
Unescapes all valid JSON string escape sequences and raises ValueError @@ -67,6 +93,7 @@ def py_scanstring(s, end, encoding=None, strict=True, if chunk is None: raise JSONDecodeError( "Unterminated string starting at", s, begin) + prev_end = end end = chunk.end() content, terminator = chunk.groups() # Content is contains zero or more unescaped string characters @@ -81,7 +108,7 @@ def py_scanstring(s, end, encoding=None, strict=True, elif terminator != '\\': if strict: msg = "Invalid control character %r at" - raise JSONDecodeError(msg, s, end) + raise JSONDecodeError(msg, s, prev_end) else: _append(terminator) continue @@ -100,35 +127,18 @@ def py_scanstring(s, end, encoding=None, strict=True, end += 1 else: # Unicode escape sequence - msg = "Invalid \\uXXXX escape sequence" - esc = s[end + 1:end + 5] - escX = esc[1:2] - if len(esc) != 4 or escX == 'x' or escX == 'X': - raise JSONDecodeError(msg, s, end - 1) - try: - uni = int(esc, 16) - except ValueError: - raise JSONDecodeError(msg, s, end - 1) - if uni < 0 or uni > _maxunicode: - raise JSONDecodeError(msg, s, end - 1) - end += 5 + uni, end = _scan_four_digit_hex(s, end + 1) # Check for surrogate pair on UCS-4 systems # Note that this will join high/low surrogate pairs # but will also pass unpaired surrogates through if (_maxunicode > 65535 and uni & 0xfc00 == 0xd800 and s[end:end + 2] == '\\u'): - esc2 = s[end + 2:end + 6] - escX = esc2[1:2] - if len(esc2) == 4 and not (escX == 'x' or escX == 'X'): - try: - uni2 = int(esc2, 16) - except ValueError: - raise JSONDecodeError(msg, s, end) - if uni2 & 0xfc00 == 0xdc00: - uni = 0x10000 + (((uni - 0xd800) << 10) | - (uni2 - 0xdc00)) - end += 6 + uni2, end2 = _scan_four_digit_hex(s, end + 2) + if uni2 & 0xfc00 == 0xdc00: + uni = 0x10000 + (((uni - 0xd800) << 10) | + (uni2 - 0xdc00)) + end = end2 char = unichr(uni) # Append the unescaped character _append(char) @@ -169,7 +179,7 @@ def JSONObject(state, encoding, strict, scan_once, object_hook, return pairs, end + 1 elif nextchar != '"': raise JSONDecodeError( - "Expecting property name enclosed in double quotes", + "Expecting property name enclosed in double quotes or '}'", s, end) end += 1 while True: @@ -296,14 +306,15 @@ class JSONDecoder(object): | null | None | +---------------+-------------------+ - It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as + When allow_nan=True, it also understands + ``NaN``, ``Infinity``, and ``-Infinity`` as their corresponding ``float`` values, which is outside the JSON spec. """ def __init__(self, encoding=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, strict=True, - object_pairs_hook=None): + object_pairs_hook=None, allow_nan=False): """ *encoding* determines the encoding used to interpret any :class:`str` objects decoded by this instance (``'utf-8'`` by @@ -336,10 +347,13 @@ class JSONDecoder(object): ``int(num_str)``. This can be used to use another datatype or parser for JSON integers (e.g. :class:`float`). - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. + *allow_nan*, if True (default false), will allow the parser to + accept the non-standard floats ``NaN``, ``Infinity``, and ``-Infinity``. + + *parse_constant*, if specified, will be + called with one of the following strings: ``'-Infinity'``, + ``'Infinity'``, ``'NaN'``. 
It is not recommended to use this feature, + as it is rare to parse non-compliant JSON containing these values. *strict* controls the parser's behavior when it encounters an invalid control character in a string. The default setting of @@ -353,8 +367,8 @@ class JSONDecoder(object): self.object_hook = object_hook self.object_pairs_hook = object_pairs_hook self.parse_float = parse_float or float - self.parse_int = parse_int or int - self.parse_constant = parse_constant or _CONSTANTS.__getitem__ + self.parse_int = parse_int or bounded_int + self.parse_constant = parse_constant or (allow_nan and _CONSTANTS.__getitem__ or None) self.strict = strict self.parse_object = JSONObject self.parse_array = JSONArray diff --git a/lib/simplejson/encoder.py b/lib/simplejson/encoder.py index e93fe43f..661ff361 100644 --- a/lib/simplejson/encoder.py +++ b/lib/simplejson/encoder.py @@ -5,7 +5,7 @@ import re from operator import itemgetter # Do not import Decimal directly to avoid reload issues import decimal -from .compat import unichr, binary_type, text_type, string_types, integer_types, PY3 +from .compat import binary_type, text_type, string_types, integer_types, PY3 def _import_speedups(): try: from . import _speedups @@ -140,7 +140,7 @@ class JSONEncoder(object): key_separator = ': ' def __init__(self, skipkeys=False, ensure_ascii=True, - check_circular=True, allow_nan=True, sort_keys=False, + check_circular=True, allow_nan=False, sort_keys=False, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, @@ -161,10 +161,11 @@ class JSONEncoder(object): prevent an infinite recursion (which would cause an OverflowError). Otherwise, no such check takes place. - If allow_nan is true, then NaN, Infinity, and -Infinity will be - encoded as such. This behavior is not JSON specification compliant, - but is consistent with most JavaScript based encoders and decoders. - Otherwise, it will be a ValueError to encode such floats. + If allow_nan is true (default: False), then out of range float + values (nan, inf, -inf) will be serialized to + their JavaScript equivalents (NaN, Infinity, -Infinity) + instead of raising a ValueError. See + ignore_nan for ECMA-262 compliant behavior. If sort_keys is true, then the output of dictionaries will be sorted by key; this is useful for regression tests to ensure @@ -294,7 +295,7 @@ class JSONEncoder(object): # This doesn't pass the iterator directly to ''.join() because the # exceptions aren't as detailed. The list call should be roughly # equivalent to the PySequence_Fast that ''.join() would do. - chunks = self.iterencode(o, _one_shot=True) + chunks = self.iterencode(o) if not isinstance(chunks, (list, tuple)): chunks = list(chunks) if self.ensure_ascii: @@ -302,7 +303,7 @@ class JSONEncoder(object): else: return u''.join(chunks) - def iterencode(self, o, _one_shot=False): + def iterencode(self, o): """Encode the given object and yield each string representation as available. 
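Likewise, a hedged sketch of the decoder hardening in this commit (the bounded_int backport and the parse_constant gating in the scanner), assuming the same vendored simplejson 3.19.1: integer literals beyond the ~4300-digit conversion limit and the non-standard NaN/Infinity constants are now rejected by default.

    import math
    import simplejson as json

    json.loads("123")                    # ordinary numbers still parse
    try:
        json.loads("1" + "0" * 5000)     # exceeds the integer string digit limit
    except ValueError:
        pass
    try:
        json.loads("Infinity")           # constants now require allow_nan=True
    except ValueError:
        pass
    assert math.isnan(json.loads("NaN", allow_nan=True))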
@@ -356,8 +357,7 @@ class JSONEncoder(object): key_memo = {} int_as_string_bitcount = ( 53 if self.bigint_as_string else self.int_as_string_bitcount) - if (_one_shot and c_make_encoder is not None - and self.indent is None): + if (c_make_encoder is not None and self.indent is None): _iterencode = c_make_encoder( markers, self.default, _encoder, self.indent, self.key_separator, self.item_separator, self.sort_keys, @@ -370,7 +370,7 @@ class JSONEncoder(object): _iterencode = _make_iterencode( markers, self.default, _encoder, self.indent, floatstr, self.key_separator, self.item_separator, self.sort_keys, - self.skipkeys, _one_shot, self.use_decimal, + self.skipkeys, self.use_decimal, self.namedtuple_as_object, self.tuple_as_array, int_as_string_bitcount, self.item_sort_key, self.encoding, self.for_json, @@ -398,14 +398,14 @@ class JSONEncoderForHTML(JSONEncoder): def encode(self, o): # Override JSONEncoder.encode because it has hacks for # performance that make things more complicated. - chunks = self.iterencode(o, True) + chunks = self.iterencode(o) if self.ensure_ascii: return ''.join(chunks) else: return u''.join(chunks) - def iterencode(self, o, _one_shot=False): - chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot) + def iterencode(self, o): + chunks = super(JSONEncoderForHTML, self).iterencode(o) for chunk in chunks: chunk = chunk.replace('&', '\\u0026') chunk = chunk.replace('<', '\\u003c') @@ -419,7 +419,7 @@ class JSONEncoderForHTML(JSONEncoder): def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, - _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot, + _key_separator, _item_separator, _sort_keys, _skipkeys, _use_decimal, _namedtuple_as_object, _tuple_as_array, _int_as_string_bitcount, _item_sort_key, _encoding,_for_json, diff --git a/lib/simplejson/scanner.py b/lib/simplejson/scanner.py index 85e385e1..34710d68 100644 --- a/lib/simplejson/scanner.py +++ b/lib/simplejson/scanner.py @@ -60,11 +60,11 @@ def py_make_scanner(context): else: res = parse_int(integer) return res, m.end() - elif nextchar == 'N' and string[idx:idx + 3] == 'NaN': + elif parse_constant and nextchar == 'N' and string[idx:idx + 3] == 'NaN': return parse_constant('NaN'), idx + 3 - elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity': + elif parse_constant and nextchar == 'I' and string[idx:idx + 8] == 'Infinity': return parse_constant('Infinity'), idx + 8 - elif nextchar == '-' and string[idx:idx + 9] == '-Infinity': + elif parse_constant and nextchar == '-' and string[idx:idx + 9] == '-Infinity': return parse_constant('-Infinity'), idx + 9 else: raise JSONDecodeError(errmsg, string, idx) diff --git a/lib/simplejson/tests/test_decode.py b/lib/simplejson/tests/test_decode.py index 6960ee58..317b4f98 100644 --- a/lib/simplejson/tests/test_decode.py +++ b/lib/simplejson/tests/test_decode.py @@ -2,6 +2,7 @@ from __future__ import absolute_import import decimal from unittest import TestCase +import sys import simplejson as json from simplejson.compat import StringIO, b, binary_type from simplejson import OrderedDict @@ -117,3 +118,10 @@ class TestDecode(TestCase): diff = id(x) - id(y) self.assertRaises(ValueError, j.scan_once, y, diff) self.assertRaises(ValueError, j.raw_decode, y, i) + + def test_bounded_int(self): + # SJ-PT-23-03, limit quadratic number parsing per Python 3.11 + max_str_digits = getattr(sys, 'get_int_max_str_digits', lambda: 4300)() + s = '1' + '0' * (max_str_digits - 1) + self.assertEqual(json.loads(s), int(s)) + self.assertRaises(ValueError, 
json.loads, s + '0') diff --git a/lib/simplejson/tests/test_fail.py b/lib/simplejson/tests/test_fail.py index 788f3a52..5f9a8f69 100644 --- a/lib/simplejson/tests/test_fail.py +++ b/lib/simplejson/tests/test_fail.py @@ -145,7 +145,7 @@ class TestFail(TestCase): ('["spam', 'Unterminated string starting at', 1), ('["spam"', "Expecting ',' delimiter", 7), ('["spam",', 'Expecting value', 8), - ('{', 'Expecting property name enclosed in double quotes', 1), + ('{', "Expecting property name enclosed in double quotes or '}'", 1), ('{"', 'Unterminated string starting at', 1), ('{"spam', 'Unterminated string starting at', 1), ('{"spam"', "Expecting ':' delimiter", 7), @@ -156,6 +156,8 @@ class TestFail(TestCase): ('"', 'Unterminated string starting at', 0), ('"spam', 'Unterminated string starting at', 0), ('[,', "Expecting value", 1), + ('--', 'Expecting value', 0), + ('"\x18d', "Invalid control character %r", 1), ] for data, msg, idx in test_cases: try: diff --git a/lib/simplejson/tests/test_float.py b/lib/simplejson/tests/test_float.py index e382ec21..a9779694 100644 --- a/lib/simplejson/tests/test_float.py +++ b/lib/simplejson/tests/test_float.py @@ -7,9 +7,9 @@ from simplejson.decoder import NaN, PosInf, NegInf class TestFloat(TestCase): def test_degenerates_allow(self): for inf in (PosInf, NegInf): - self.assertEqual(json.loads(json.dumps(inf)), inf) + self.assertEqual(json.loads(json.dumps(inf, allow_nan=True), allow_nan=True), inf) # Python 2.5 doesn't have math.isnan - nan = json.loads(json.dumps(NaN)) + nan = json.loads(json.dumps(NaN, allow_nan=True), allow_nan=True) self.assertTrue((0 + nan) != nan) def test_degenerates_ignore(self): @@ -19,6 +19,9 @@ class TestFloat(TestCase): def test_degenerates_deny(self): for f in (PosInf, NegInf, NaN): self.assertRaises(ValueError, json.dumps, f, allow_nan=False) + for s in ('Infinity', '-Infinity', 'NaN'): + self.assertRaises(ValueError, json.loads, s, allow_nan=False) + self.assertRaises(ValueError, json.loads, s) def test_floats(self): for num in [1617161771.7650001, math.pi, math.pi**100, diff --git a/lib/simplejson/tests/test_scanstring.py b/lib/simplejson/tests/test_scanstring.py index c6c53b81..1f544834 100644 --- a/lib/simplejson/tests/test_scanstring.py +++ b/lib/simplejson/tests/test_scanstring.py @@ -132,7 +132,9 @@ class TestScanString(TestCase): self.assertRaises(ValueError, scanstring, '\\ud834\\x0123"', 0, None, True) - self.assertRaises(json.JSONDecodeError, scanstring, "\\u-123", 0, None, True) + self.assertRaises(json.JSONDecodeError, scanstring, '\\u-123"', 0, None, True) + # SJ-PT-23-01: Invalid Handling of Broken Unicode Escape Sequences + self.assertRaises(json.JSONDecodeError, scanstring, '\\u EDD"', 0, None, True) def test_issue3623(self): self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1, diff --git a/requirements.txt b/requirements.txt index 55053413..b9a561d1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -39,7 +39,7 @@ pytz==2023.3 requests==2.28.2 requests-oauthlib==1.3.1 rumps==0.4.0; platform_system == "Darwin" -simplejson==3.18.3 +simplejson==3.19.1 six==1.16.0 soupsieve==2.4 tempora==5.2.1 From e70e08c3f5ab27daf8ad3f7531819f07d97b5351 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Aug 2023 21:38:49 -0700 Subject: [PATCH 042/361] Bump beautifulsoup4 from 4.11.2 to 4.12.2 (#2037) * Bump beautifulsoup4 from 4.11.2 to 4.12.2 Bumps [beautifulsoup4](https://www.crummy.com/software/BeautifulSoup/bs4/) from 4.11.2 to 4.12.2. 
--- updated-dependencies: - dependency-name: beautifulsoup4 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update beautifulsoup4==4.12.2 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/bs4/__init__.py | 59 ++- lib/bs4/builder/_htmlparser.py | 25 +- lib/bs4/css.py | 280 ++++++++++ lib/bs4/diagnose.py | 15 - lib/bs4/element.py | 470 ++++++++++------- lib/bs4/formatter.py | 2 +- lib/bs4/tests/__init__.py | 40 +- ...mized-bs4_fuzzer-4818336571064320.testcase | 1 + ...mized-bs4_fuzzer-4999465949331456.testcase | 1 + ...mized-bs4_fuzzer-5167584867909632.testcase | Bin 0 -> 19469 bytes ...mized-bs4_fuzzer-5703933063462912.testcase | 2 + ...mized-bs4_fuzzer-5843991618256896.testcase | 1 + ...mized-bs4_fuzzer-5984173902397440.testcase | Bin 0 -> 51495 bytes ...mized-bs4_fuzzer-6124268085182464.testcase | 1 + ...mized-bs4_fuzzer-6241471367348224.testcase | 1 + ...mized-bs4_fuzzer-6450958476902400.testcase | Bin 0 -> 3546 bytes ...mized-bs4_fuzzer-6600557255327744.testcase | Bin 0 -> 124 bytes ...0c8ed8bcd0785b67000fcd5dea1d33f08.testcase | Bin 0 -> 2607 bytes lib/bs4/tests/test_css.py | 487 ++++++++++++++++++ lib/bs4/tests/test_formatter.py | 20 +- lib/bs4/tests/test_fuzz.py | 91 ++++ lib/bs4/tests/test_htmlparser.py | 24 + lib/bs4/tests/test_lxml.py | 12 +- lib/bs4/tests/test_pageelement.py | 480 ++--------------- lib/bs4/tests/test_soup.py | 42 ++ lib/soupsieve/__init__.py | 30 +- lib/soupsieve/__meta__.py | 2 +- lib/soupsieve/css_match.py | 54 +- lib/soupsieve/css_parser.py | 22 +- lib/soupsieve/css_types.py | 24 +- lib/soupsieve/util.py | 6 +- requirements.txt | 2 +- 32 files changed, 1439 insertions(+), 755 deletions(-) create mode 100644 lib/bs4/css.py create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4818336571064320.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4999465949331456.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5167584867909632.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5843991618256896.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5984173902397440.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6124268085182464.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6241471367348224.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6450958476902400.testcase create mode 100644 lib/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6600557255327744.testcase create mode 100644 lib/bs4/tests/fuzz/crash-0d306a50c8ed8bcd0785b67000fcd5dea1d33f08.testcase create mode 100644 lib/bs4/tests/test_css.py create mode 100644 lib/bs4/tests/test_fuzz.py diff --git a/lib/bs4/__init__.py b/lib/bs4/__init__.py index db71cc7c..3d2ab09a 100644 --- a/lib/bs4/__init__.py +++ b/lib/bs4/__init__.py @@ -15,7 +15,7 @@ documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ """ __author__ = "Leonard Richardson (leonardr@segfault.org)" -__version__ = "4.11.2" +__version__ = "4.12.2" __copyright__ = "Copyright (c) 2004-2023 Leonard Richardson" 
# Use of this source code is governed by the MIT license. __license__ = "MIT" @@ -38,11 +38,13 @@ from .builder import ( builder_registry, ParserRejectedMarkup, XMLParsedAsHTMLWarning, + HTMLParserTreeBuilder ) from .dammit import UnicodeDammit from .element import ( CData, Comment, + CSS, DEFAULT_OUTPUT_ENCODING, Declaration, Doctype, @@ -116,7 +118,7 @@ class BeautifulSoup(Tag): ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, pass the additional argument 'features=\"%(parser)s\"' to the BeautifulSoup constructor.\n" - + def __init__(self, markup="", features=None, builder=None, parse_only=None, from_encoding=None, exclude_encodings=None, element_classes=None, **kwargs): @@ -348,25 +350,49 @@ class BeautifulSoup(Tag): self.markup = None self.builder.soup = None - def __copy__(self): - """Copy a BeautifulSoup object by converting the document to a string and parsing it again.""" - copy = type(self)( - self.encode('utf-8'), builder=self.builder, from_encoding='utf-8' - ) + def _clone(self): + """Create a new BeautifulSoup object with the same TreeBuilder, + but not associated with any markup. - # Although we encoded the tree to UTF-8, that may not have - # been the encoding of the original markup. Set the copy's - # .original_encoding to reflect the original object's - # .original_encoding. - copy.original_encoding = self.original_encoding - return copy + This is the first step of the deepcopy process. + """ + clone = type(self)("", None, self.builder) + # Keep track of the encoding of the original document, + # since we won't be parsing it again. + clone.original_encoding = self.original_encoding + return clone + def __getstate__(self): # Frequently a tree builder can't be pickled. d = dict(self.__dict__) if 'builder' in d and d['builder'] is not None and not self.builder.picklable: - d['builder'] = None + d['builder'] = type(self.builder) + # Store the contents as a Unicode string. + d['contents'] = [] + d['markup'] = self.decode() + + # If _most_recent_element is present, it's a Tag object left + # over from initial parse. It might not be picklable and we + # don't need it. + if '_most_recent_element' in d: + del d['_most_recent_element'] return d + + def __setstate__(self, state): + # If necessary, restore the TreeBuilder by looking it up. + self.__dict__ = state + if isinstance(self.builder, type): + self.builder = self.builder() + elif not self.builder: + # We don't know which builder was used to build this + # parse tree, so use a default we know is always available. 
+ self.builder = HTMLParserTreeBuilder() + self.builder.soup = self + self.reset() + self._feed() + return state + @classmethod def _decode_markup(cls, markup): @@ -468,6 +494,7 @@ class BeautifulSoup(Tag): self.open_tag_counter = Counter() self.preserve_whitespace_tag_stack = [] self.string_container_stack = [] + self._most_recent_element = None self.pushTag(self) def new_tag(self, name, namespace=None, nsprefix=None, attrs={}, @@ -749,7 +776,7 @@ class BeautifulSoup(Tag): def decode(self, pretty_print=False, eventual_encoding=DEFAULT_OUTPUT_ENCODING, - formatter="minimal"): + formatter="minimal", iterator=None): """Returns a string or Unicode representation of the parse tree as an HTML or XML document. @@ -776,7 +803,7 @@ class BeautifulSoup(Tag): else: indent_level = 0 return prefix + super(BeautifulSoup, self).decode( - indent_level, eventual_encoding, formatter) + indent_level, eventual_encoding, formatter, iterator) # Aliases to make it easier to get started quickly, e.g. 'from bs4 import _soup' _s = BeautifulSoup diff --git a/lib/bs4/builder/_htmlparser.py b/lib/bs4/builder/_htmlparser.py index e48b6a0e..e065096b 100644 --- a/lib/bs4/builder/_htmlparser.py +++ b/lib/bs4/builder/_htmlparser.py @@ -24,6 +24,7 @@ from bs4.dammit import EntitySubstitution, UnicodeDammit from bs4.builder import ( DetectsXMLParsedAsHTML, + ParserRejectedMarkup, HTML, HTMLTreeBuilder, STRICT, @@ -70,6 +71,22 @@ class BeautifulSoupHTMLParser(HTMLParser, DetectsXMLParsedAsHTML): self._initialize_xml_detector() + def error(self, message): + # NOTE: This method is required so long as Python 3.9 is + # supported. The corresponding code is removed from HTMLParser + # in 3.5, but not removed from ParserBase until 3.10. + # https://github.com/python/cpython/issues/76025 + # + # The original implementation turned the error into a warning, + # but in every case I discovered, this made HTMLParser + # immediately crash with an error message that was less + # helpful than the warning. The new implementation makes it + # more clear that html.parser just can't parse this + # markup. The 3.10 implementation does the same, though it + # raises AssertionError rather than calling a method. (We + # catch this error and wrap it in a ParserRejectedMarkup.) + raise ParserRejectedMarkup(message) + def handle_startendtag(self, name, attrs): """Handle an incoming empty-element tag. @@ -359,6 +376,12 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder): args, kwargs = self.parser_args parser = BeautifulSoupHTMLParser(*args, **kwargs) parser.soup = self.soup - parser.feed(markup) + try: + parser.feed(markup) + except AssertionError as e: + # html.parser raises AssertionError in rare cases to + # indicate a fatal problem with the markup, especially + # when there's an error in the doctype declaration. + raise ParserRejectedMarkup(e) parser.close() parser.already_closed_empty_element = [] diff --git a/lib/bs4/css.py b/lib/bs4/css.py new file mode 100644 index 00000000..245ac601 --- /dev/null +++ b/lib/bs4/css.py @@ -0,0 +1,280 @@ +"""Integration code for CSS selectors using Soup Sieve (pypi: soupsieve).""" + +import warnings +try: + import soupsieve +except ImportError as e: + soupsieve = None + warnings.warn( + 'The soupsieve package is not installed. CSS selectors cannot be used.' + ) + + +class CSS(object): + """A proxy object against the soupsieve library, to simplify its + CSS selector API. 
+ + Acquire this object through the .css attribute on the + BeautifulSoup object, or on the Tag you want to use as the + starting point for a CSS selector. + + The main advantage of doing this is that the tag to be selected + against doesn't need to be explicitly specified in the function + calls, since it's already scoped to a tag. + """ + + def __init__(self, tag, api=soupsieve): + """Constructor. + + You don't need to instantiate this class yourself; instead, + access the .css attribute on the BeautifulSoup object, or on + the Tag you want to use as the starting point for your CSS + selector. + + :param tag: All CSS selectors will use this as their starting + point. + + :param api: A plug-in replacement for the soupsieve module, + designed mainly for use in tests. + """ + if api is None: + raise NotImplementedError( + "Cannot execute CSS selectors because the soupsieve package is not installed." + ) + self.api = api + self.tag = tag + + def escape(self, ident): + """Escape a CSS identifier. + + This is a simple wrapper around soupselect.escape(). See the + documentation for that function for more information. + """ + if soupsieve is None: + raise NotImplementedError( + "Cannot escape CSS identifiers because the soupsieve package is not installed." + ) + return self.api.escape(ident) + + def _ns(self, ns, select): + """Normalize a dictionary of namespaces.""" + if not isinstance(select, self.api.SoupSieve) and ns is None: + # If the selector is a precompiled pattern, it already has + # a namespace context compiled in, which cannot be + # replaced. + ns = self.tag._namespaces + return ns + + def _rs(self, results): + """Normalize a list of results to a Resultset. + + A ResultSet is more consistent with the rest of Beautiful + Soup's API, and ResultSet.__getattr__ has a helpful error + message if you try to treat a list of results as a single + result (a common mistake). + """ + # Import here to avoid circular import + from bs4.element import ResultSet + return ResultSet(None, results) + + def compile(self, select, namespaces=None, flags=0, **kwargs): + """Pre-compile a selector and return the compiled object. + + :param selector: A CSS selector. + + :param namespaces: A dictionary mapping namespace prefixes + used in the CSS selector to namespace URIs. By default, + Beautiful Soup will use the prefixes it encountered while + parsing the document. + + :param flags: Flags to be passed into Soup Sieve's + soupsieve.compile() method. + + :param kwargs: Keyword arguments to be passed into SoupSieve's + soupsieve.compile() method. + + :return: A precompiled selector object. + :rtype: soupsieve.SoupSieve + """ + return self.api.compile( + select, self._ns(namespaces, select), flags, **kwargs + ) + + def select_one(self, select, namespaces=None, flags=0, **kwargs): + """Perform a CSS selection operation on the current Tag and return the + first result. + + This uses the Soup Sieve library. For more information, see + that library's documentation for the soupsieve.select_one() + method. + + :param selector: A CSS selector. + + :param namespaces: A dictionary mapping namespace prefixes + used in the CSS selector to namespace URIs. By default, + Beautiful Soup will use the prefixes it encountered while + parsing the document. + + :param flags: Flags to be passed into Soup Sieve's + soupsieve.select_one() method. + + :param kwargs: Keyword arguments to be passed into SoupSieve's + soupsieve.select_one() method. + + :return: A Tag, or None if the selector has no match. 
+ :rtype: bs4.element.Tag + + """ + return self.api.select_one( + select, self.tag, self._ns(namespaces, select), flags, **kwargs + ) + + def select(self, select, namespaces=None, limit=0, flags=0, **kwargs): + """Perform a CSS selection operation on the current Tag. + + This uses the Soup Sieve library. For more information, see + that library's documentation for the soupsieve.select() + method. + + :param selector: A string containing a CSS selector. + + :param namespaces: A dictionary mapping namespace prefixes + used in the CSS selector to namespace URIs. By default, + Beautiful Soup will pass in the prefixes it encountered while + parsing the document. + + :param limit: After finding this number of results, stop looking. + + :param flags: Flags to be passed into Soup Sieve's + soupsieve.select() method. + + :param kwargs: Keyword arguments to be passed into SoupSieve's + soupsieve.select() method. + + :return: A ResultSet of Tag objects. + :rtype: bs4.element.ResultSet + + """ + if limit is None: + limit = 0 + + return self._rs( + self.api.select( + select, self.tag, self._ns(namespaces, select), limit, flags, + **kwargs + ) + ) + + def iselect(self, select, namespaces=None, limit=0, flags=0, **kwargs): + """Perform a CSS selection operation on the current Tag. + + This uses the Soup Sieve library. For more information, see + that library's documentation for the soupsieve.iselect() + method. It is the same as select(), but it returns a generator + instead of a list. + + :param selector: A string containing a CSS selector. + + :param namespaces: A dictionary mapping namespace prefixes + used in the CSS selector to namespace URIs. By default, + Beautiful Soup will pass in the prefixes it encountered while + parsing the document. + + :param limit: After finding this number of results, stop looking. + + :param flags: Flags to be passed into Soup Sieve's + soupsieve.iselect() method. + + :param kwargs: Keyword arguments to be passed into SoupSieve's + soupsieve.iselect() method. + + :return: A generator + :rtype: types.GeneratorType + """ + return self.api.iselect( + select, self.tag, self._ns(namespaces, select), limit, flags, **kwargs + ) + + def closest(self, select, namespaces=None, flags=0, **kwargs): + """Find the Tag closest to this one that matches the given selector. + + This uses the Soup Sieve library. For more information, see + that library's documentation for the soupsieve.closest() + method. + + :param selector: A string containing a CSS selector. + + :param namespaces: A dictionary mapping namespace prefixes + used in the CSS selector to namespace URIs. By default, + Beautiful Soup will pass in the prefixes it encountered while + parsing the document. + + :param flags: Flags to be passed into Soup Sieve's + soupsieve.closest() method. + + :param kwargs: Keyword arguments to be passed into SoupSieve's + soupsieve.closest() method. + + :return: A Tag, or None if there is no match. + :rtype: bs4.Tag + + """ + return self.api.closest( + select, self.tag, self._ns(namespaces, select), flags, **kwargs + ) + + def match(self, select, namespaces=None, flags=0, **kwargs): + """Check whether this Tag matches the given CSS selector. + + This uses the Soup Sieve library. For more information, see + that library's documentation for the soupsieve.match() + method. + + :param: a CSS selector. + + :param namespaces: A dictionary mapping namespace prefixes + used in the CSS selector to namespace URIs. 
By default, + Beautiful Soup will pass in the prefixes it encountered while + parsing the document. + + :param flags: Flags to be passed into Soup Sieve's + soupsieve.match() method. + + :param kwargs: Keyword arguments to be passed into SoupSieve's + soupsieve.match() method. + + :return: True if this Tag matches the selector; False otherwise. + :rtype: bool + """ + return self.api.match( + select, self.tag, self._ns(namespaces, select), flags, **kwargs + ) + + def filter(self, select, namespaces=None, flags=0, **kwargs): + """Filter this Tag's direct children based on the given CSS selector. + + This uses the Soup Sieve library. It works the same way as + passing this Tag into that library's soupsieve.filter() + method. More information, for more information see the + documentation for soupsieve.filter(). + + :param namespaces: A dictionary mapping namespace prefixes + used in the CSS selector to namespace URIs. By default, + Beautiful Soup will pass in the prefixes it encountered while + parsing the document. + + :param flags: Flags to be passed into Soup Sieve's + soupsieve.filter() method. + + :param kwargs: Keyword arguments to be passed into SoupSieve's + soupsieve.filter() method. + + :return: A ResultSet of Tag objects. + :rtype: bs4.element.ResultSet + + """ + return self._rs( + self.api.filter( + select, self.tag, self._ns(namespaces, select), flags, **kwargs + ) + ) diff --git a/lib/bs4/diagnose.py b/lib/bs4/diagnose.py index 3bf583f5..e079772e 100644 --- a/lib/bs4/diagnose.py +++ b/lib/bs4/diagnose.py @@ -59,21 +59,6 @@ def diagnose(data): if hasattr(data, 'read'): data = data.read() - elif data.startswith("http:") or data.startswith("https:"): - print(('"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data)) - print("You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup.") - return - else: - try: - if os.path.exists(data): - print(('"%s" looks like a filename. Reading data from the file.' % data)) - with open(data) as fp: - data = fp.read() - except ValueError: - # This can happen on some platforms when the 'filename' is - # too long. Assume it's data and not a filename. - pass - print("") for parser in basic_parsers: print(("Trying to parse your markup with %s" % parser)) diff --git a/lib/bs4/element.py b/lib/bs4/element.py index 583d0e8a..9c73957c 100644 --- a/lib/bs4/element.py +++ b/lib/bs4/element.py @@ -8,14 +8,8 @@ except ImportError as e: import re import sys import warnings -try: - import soupsieve -except ImportError as e: - soupsieve = None - warnings.warn( - 'The soupsieve package is not installed. CSS selectors cannot be used.' - ) +from bs4.css import CSS from bs4.formatter import ( Formatter, HTMLFormatter, @@ -69,13 +63,13 @@ PYTHON_SPECIFIC_ENCODINGS = set([ "string-escape", "string_escape", ]) - + class NamespacedAttribute(str): """A namespaced string (e.g. 'xml:lang') that remembers the namespace ('xml') and the name ('lang') that were used to create it. """ - + def __new__(cls, prefix, name=None, namespace=None): if not name: # This is the default namespace. Its name "has no value" @@ -146,14 +140,19 @@ class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution): return match.group(1) + encoding return self.CHARSET_RE.sub(rewrite, self.original_value) - + class PageElement(object): """Contains the navigational information for some part of the page: that is, its current location in the parse tree. NavigableString, Tag, etc. are all subclasses of PageElement. 
""" - + + # In general, we can't tell just by looking at an element whether + # it's contained in an XML document or an HTML document. But for + # Tags (q.v.) we can store this information at parse time. + known_xml = None + def setup(self, parent=None, previous_element=None, next_element=None, previous_sibling=None, next_sibling=None): """Sets up the initial relations between this element and @@ -163,7 +162,7 @@ class PageElement(object): :param previous_element: The element parsed immediately before this one. - + :param next_element: The element parsed immediately before this one. @@ -257,11 +256,11 @@ class PageElement(object): default = object() def _all_strings(self, strip=False, types=default): """Yield all strings of certain classes, possibly stripping them. - + This is implemented differently in Tag and NavigableString. """ raise NotImplementedError() - + @property def stripped_strings(self): """Yield all strings in this PageElement, stripping them first. @@ -294,11 +293,11 @@ class PageElement(object): strip, types=types)]) getText = get_text text = property(get_text) - + def replace_with(self, *args): - """Replace this PageElement with one or more PageElements, keeping the + """Replace this PageElement with one or more PageElements, keeping the rest of the tree the same. - + :param args: One or more PageElements. :return: `self`, no longer part of the tree. """ @@ -410,7 +409,7 @@ class PageElement(object): This works the same way as `list.insert`. :param position: The numeric position that should be occupied - in `self.children` by the new PageElement. + in `self.children` by the new PageElement. :param new_child: A PageElement. """ if new_child is None: @@ -546,7 +545,7 @@ class PageElement(object): "Element has no parent, so 'after' has no meaning.") if any(x is self for x in args): raise ValueError("Can't insert an element after itself.") - + offset = 0 for successor in args: # Extract first so that the index won't be screwed up if they @@ -912,7 +911,7 @@ class PageElement(object): :rtype: bool """ return getattr(self, '_decomposed', False) or False - + # Old non-property versions of the generators, for backwards # compatibility with BS3. def nextGenerator(self): @@ -936,16 +935,11 @@ class NavigableString(str, PageElement): When Beautiful Soup parses the markup penguin, it will create a NavigableString for the string "penguin". - """ + """ PREFIX = '' SUFFIX = '' - # We can't tell just by looking at a string whether it's contained - # in an XML document or an HTML document. - - known_xml = None - def __new__(cls, value): """Create a new NavigableString. @@ -961,12 +955,22 @@ class NavigableString(str, PageElement): u.setup() return u - def __copy__(self): + def __deepcopy__(self, memo, recursive=False): """A copy of a NavigableString has the same contents and class as the original, but it is not connected to the parse tree. + + :param recursive: This parameter is ignored; it's only defined + so that NavigableString.__deepcopy__ implements the same + signature as Tag.__deepcopy__. """ return type(self)(self) + def __copy__(self): + """A copy of a NavigableString can only be a deep copy, because + only one PageElement can occupy a given place in a parse tree. + """ + return self.__deepcopy__({}) + def __getnewargs__(self): return (str(self),) @@ -1059,10 +1063,10 @@ class PreformattedString(NavigableString): as comments (the Comment class) and CDATA blocks (the CData class). 
""" - + PREFIX = '' SUFFIX = '' - + def output_ready(self, formatter=None): """Make this string ready for output by adding any subclass-specific prefix or suffix. @@ -1144,7 +1148,7 @@ class Stylesheet(NavigableString): """ pass - + class Script(NavigableString): """A NavigableString representing an executable script (probably Javascript). @@ -1250,7 +1254,7 @@ class Tag(PageElement): if ((not builder or builder.store_line_numbers) and (sourceline is not None or sourcepos is not None)): self.sourceline = sourceline - self.sourcepos = sourcepos + self.sourcepos = sourcepos if attrs is None: attrs = {} elif attrs: @@ -1308,13 +1312,49 @@ class Tag(PageElement): self.interesting_string_types = builder.string_containers[self.name] else: self.interesting_string_types = self.DEFAULT_INTERESTING_STRING_TYPES - + parserClass = _alias("parser_class") # BS3 - def __copy__(self): - """A copy of a Tag is a new Tag, unconnected to the parse tree. + def __deepcopy__(self, memo, recursive=True): + """A deepcopy of a Tag is a new Tag, unconnected to the parse tree. Its contents are a copy of the old Tag's contents. """ + clone = self._clone() + + if recursive: + # Clone this tag's descendants recursively, but without + # making any recursive function calls. + tag_stack = [clone] + for event, element in self._event_stream(self.descendants): + if event is Tag.END_ELEMENT_EVENT: + # Stop appending incoming Tags to the Tag that was + # just closed. + tag_stack.pop() + else: + descendant_clone = element.__deepcopy__( + memo, recursive=False + ) + # Add to its parent's .contents + tag_stack[-1].append(descendant_clone) + + if event is Tag.START_ELEMENT_EVENT: + # Add the Tag itself to the stack so that its + # children will be .appended to it. + tag_stack.append(descendant_clone) + return clone + + def __copy__(self): + """A copy of a Tag must always be a deep copy, because a Tag's + children can only have one parent at a time. + """ + return self.__deepcopy__({}) + + def _clone(self): + """Create a new Tag just like this one, but with no + contents and unattached to any parse tree. + + This is the first step in the deepcopy process. + """ clone = type(self)( None, self.builder, self.name, self.namespace, self.prefix, self.attrs, is_xml=self._is_xml, @@ -1326,8 +1366,6 @@ class Tag(PageElement): ) for attr in ('can_be_empty_element', 'hidden'): setattr(clone, attr, getattr(self, attr)) - for child in self.contents: - clone.append(child.__copy__()) return clone @property @@ -1433,7 +1471,7 @@ class Tag(PageElement): i.contents = [] i._decomposed = True i = n - + def clear(self, decompose=False): """Wipe out all children of this PageElement by calling extract() on them. @@ -1521,7 +1559,7 @@ class Tag(PageElement): if not isinstance(value, list): value = [value] return value - + def has_attr(self, key): """Does this PageElement have an attribute with the given name?""" return key in self.attrs @@ -1608,7 +1646,7 @@ class Tag(PageElement): def __repr__(self, encoding="unicode-escape"): """Renders this PageElement as a string. - :param encoding: The encoding to use (Python 2 only). + :param encoding: The encoding to use (Python 2 only). TODO: This is now ignored and a warning should be issued if a value is provided. :return: A (Unicode) string. @@ -1650,106 +1688,212 @@ class Tag(PageElement): def decode(self, indent_level=None, eventual_encoding=DEFAULT_OUTPUT_ENCODING, - formatter="minimal"): - """Render a Unicode representation of this PageElement and its - contents. 
- - :param indent_level: Each line of the rendering will be - indented this many spaces. Used internally in - recursive calls while pretty-printing. - :param eventual_encoding: The tag is destined to be - encoded into this encoding. This method is _not_ - responsible for performing that encoding. This information - is passed in so that it can be substituted in if the - document contains a tag that mentions the document's - encoding. - :param formatter: A Formatter object, or a string naming one of - the standard formatters. - """ - + formatter="minimal", + iterator=None): + pieces = [] # First off, turn a non-Formatter `formatter` into a Formatter # object. This will stop the lookup from happening over and # over again. if not isinstance(formatter, Formatter): formatter = self.formatter_for_name(formatter) - attributes = formatter.attributes(self) - attrs = [] - for key, val in attributes: - if val is None: - decoded = key + + if indent_level is True: + indent_level = 0 + + # The currently active tag that put us into string literal + # mode. Until this element is closed, children will be treated + # as string literals and not pretty-printed. String literal + # mode is turned on immediately after this tag begins, and + # turned off immediately before it's closed. This means there + # will be whitespace before and after the tag itself. + string_literal_tag = None + + for event, element in self._event_stream(iterator): + if event in (Tag.START_ELEMENT_EVENT, Tag.EMPTY_ELEMENT_EVENT): + piece = element._format_tag( + eventual_encoding, formatter, opening=True + ) + elif event is Tag.END_ELEMENT_EVENT: + piece = element._format_tag( + eventual_encoding, formatter, opening=False + ) + if indent_level is not None: + indent_level -= 1 else: - if isinstance(val, list) or isinstance(val, tuple): - val = ' '.join(val) - elif not isinstance(val, str): - val = str(val) - elif ( - isinstance(val, AttributeValueWithCharsetSubstitution) - and eventual_encoding is not None - ): - val = val.encode(eventual_encoding) + piece = element.output_ready(formatter) - text = formatter.attribute_value(val) - decoded = ( - str(key) + '=' - + formatter.quoted_attribute_value(text)) - attrs.append(decoded) - close = '' - closeTag = '' + # Now we need to apply the 'prettiness' -- extra + # whitespace before and/or after this tag. This can get + # complicated because certain tags, like
 + #
 and , can't be prettified, since adding
+            # whitespace inside them changes the meaning of the document.
+        markup = """
some
 for you 
""" + + expect = """ +
+
+ some +
 for you 
+
+
+""" + soup = self.soup(markup) + assert expect == soup.div.prettify() def test_prettify_accepts_formatter_function(self): soup = BeautifulSoup("foo", 'html.parser') 
 @@ -216,429 +249,6 @@ class TestFormatters(SoupTest): assert soup.contents[0].name == 'pre' -@pytest.mark.skipif(not SOUP_SIEVE_PRESENT, reason="Soup Sieve not installed") -class TestCSSSelectors(SoupTest): - """Test basic CSS selector functionality. - - This functionality is implemented in soupsieve, which has a much - more comprehensive test suite, so this is basically an extra check - that soupsieve works as expected. - """ - - HTML = """ - - - -The title - - - - -Hello there. - -
- -
-

An H1

-

Some text

-

Some more text

-

An H2

-

Another

-Bob - -

Another H2

-me - -span1a1 -span1a2 test - -span2a1 - - -
- -
- -
- -
-
-

English

-

English UK

-

English US

-

French

-
- -

- - -""" - - def setup_method(self): - self.soup = BeautifulSoup(self.HTML, 'html.parser') - - def assert_selects(self, selector, expected_ids, **kwargs): - el_ids = [el['id'] for el in self.soup.select(selector, **kwargs)] - el_ids.sort() - expected_ids.sort() - assert expected_ids == el_ids, "Selector %s, expected [%s], got [%s]" % ( - selector, ', '.join(expected_ids), ', '.join(el_ids) - ) - - assertSelect = assert_selects - - def assert_select_multiple(self, *tests): - for selector, expected_ids in tests: - self.assert_selects(selector, expected_ids) - - def test_one_tag_one(self): - els = self.soup.select('title') - assert len(els) == 1 - assert els[0].name == 'title' - assert els[0].contents == ['The title'] - - def test_one_tag_many(self): - els = self.soup.select('div') - assert len(els) == 4 - for div in els: - assert div.name == 'div' - - el = self.soup.select_one('div') - assert 'main' == el['id'] - - def test_select_one_returns_none_if_no_match(self): - match = self.soup.select_one('nonexistenttag') - assert None == match - - - def test_tag_in_tag_one(self): - els = self.soup.select('div div') - self.assert_selects('div div', ['inner', 'data1']) - - def test_tag_in_tag_many(self): - for selector in ('html div', 'html body div', 'body div'): - self.assert_selects(selector, ['data1', 'main', 'inner', 'footer']) - - - def test_limit(self): - self.assert_selects('html div', ['main'], limit=1) - self.assert_selects('html body div', ['inner', 'main'], limit=2) - self.assert_selects('body div', ['data1', 'main', 'inner', 'footer'], - limit=10) - - def test_tag_no_match(self): - assert len(self.soup.select('del')) == 0 - - def test_invalid_tag(self): - with pytest.raises(SelectorSyntaxError): - self.soup.select('tag%t') - - def test_select_dashed_tag_ids(self): - self.assert_selects('custom-dashed-tag', ['dash1', 'dash2']) - - def test_select_dashed_by_id(self): - dashed = self.soup.select('custom-dashed-tag[id=\"dash2\"]') - assert dashed[0].name == 'custom-dashed-tag' - assert dashed[0]['id'] == 'dash2' - - def test_dashed_tag_text(self): - assert self.soup.select('body > custom-dashed-tag')[0].text == 'Hello there.' 
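 The tests deleted above pin down the select() contracts the new css.py wrappers must keep: limit is honored, a non-matching select() returns an empty ResultSet, and a non-matching select_one() returns None. A condensed sketch of the same contracts, with invented markup, assuming soupsieve is installed: from bs4 import BeautifulSoup soup = BeautifulSoup('

', 'html.parser') assert [d['id'] for d in soup.select('div', limit=2)] == ['a', 'b'] assert soup.select('del') == [] # no match: empty ResultSet, never None assert soup.select_one('del') is None # no match: None rather than an error 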
- - def test_select_dashed_matches_find_all(self): - assert self.soup.select('custom-dashed-tag') == self.soup.find_all('custom-dashed-tag') - - def test_header_tags(self): - self.assert_select_multiple( - ('h1', ['header1']), - ('h2', ['header2', 'header3']), - ) - - def test_class_one(self): - for selector in ('.onep', 'p.onep', 'html p.onep'): - els = self.soup.select(selector) - assert len(els) == 1 - assert els[0].name == 'p' - assert els[0]['class'] == ['onep'] - - def test_class_mismatched_tag(self): - els = self.soup.select('div.onep') - assert len(els) == 0 - - def test_one_id(self): - for selector in ('div#inner', '#inner', 'div div#inner'): - self.assert_selects(selector, ['inner']) - - def test_bad_id(self): - els = self.soup.select('#doesnotexist') - assert len(els) == 0 - - def test_items_in_id(self): - els = self.soup.select('div#inner p') - assert len(els) == 3 - for el in els: - assert el.name == 'p' - assert els[1]['class'] == ['onep'] - assert not els[0].has_attr('class') - - def test_a_bunch_of_emptys(self): - for selector in ('div#main del', 'div#main div.oops', 'div div#main'): - assert len(self.soup.select(selector)) == 0 - - def test_multi_class_support(self): - for selector in ('.class1', 'p.class1', '.class2', 'p.class2', - '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'): - self.assert_selects(selector, ['pmulti']) - - def test_multi_class_selection(self): - for selector in ('.class1.class3', '.class3.class2', - '.class1.class2.class3'): - self.assert_selects(selector, ['pmulti']) - - def test_child_selector(self): - self.assert_selects('.s1 > a', ['s1a1', 's1a2']) - self.assert_selects('.s1 > a span', ['s1a2s1']) - - def test_child_selector_id(self): - self.assert_selects('.s1 > a#s1a2 span', ['s1a2s1']) - - def test_attribute_equals(self): - self.assert_select_multiple( - ('p[class="onep"]', ['p1']), - ('p[id="p1"]', ['p1']), - ('[class="onep"]', ['p1']), - ('[id="p1"]', ['p1']), - ('link[rel="stylesheet"]', ['l1']), - ('link[type="text/css"]', ['l1']), - ('link[href="blah.css"]', ['l1']), - ('link[href="no-blah.css"]', []), - ('[rel="stylesheet"]', ['l1']), - ('[type="text/css"]', ['l1']), - ('[href="blah.css"]', ['l1']), - ('[href="no-blah.css"]', []), - ('p[href="no-blah.css"]', []), - ('[href="no-blah.css"]', []), - ) - - def test_attribute_tilde(self): - self.assert_select_multiple( - ('p[class~="class1"]', ['pmulti']), - ('p[class~="class2"]', ['pmulti']), - ('p[class~="class3"]', ['pmulti']), - ('[class~="class1"]', ['pmulti']), - ('[class~="class2"]', ['pmulti']), - ('[class~="class3"]', ['pmulti']), - ('a[rel~="friend"]', ['bob']), - ('a[rel~="met"]', ['bob']), - ('[rel~="friend"]', ['bob']), - ('[rel~="met"]', ['bob']), - ) - - def test_attribute_startswith(self): - self.assert_select_multiple( - ('[rel^="style"]', ['l1']), - ('link[rel^="style"]', ['l1']), - ('notlink[rel^="notstyle"]', []), - ('[rel^="notstyle"]', []), - ('link[rel^="notstyle"]', []), - ('link[href^="bla"]', ['l1']), - ('a[href^="http://"]', ['bob', 'me']), - ('[href^="http://"]', ['bob', 'me']), - ('[id^="p"]', ['pmulti', 'p1']), - ('[id^="m"]', ['me', 'main']), - ('div[id^="m"]', ['main']), - ('a[id^="m"]', ['me']), - ('div[data-tag^="dashed"]', ['data1']) - ) - - def test_attribute_endswith(self): - self.assert_select_multiple( - ('[href$=".css"]', ['l1']), - ('link[href$=".css"]', ['l1']), - ('link[id$="1"]', ['l1']), - ('[id$="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1', 'dash1']), - ('div[id$="1"]', ['data1']), - ('[id$="noending"]', []), - ) - 
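 The attribute-operator tests above and below exercise the full CSS operator family. One more worked sketch of those operators against a single element, with invented markup (Tag.css requires bs4 >= 4.12): from bs4 import BeautifulSoup soup = BeautifulSoup( '', 'html.parser' ) a = soup.a assert a.css.match('[rel~="friend"]') # ~= : whole-word match assert a.css.match('[href^="http://"]') # ^= : prefix assert a.css.match('[href$=".html"]') # $= : suffix assert a.css.match('[href*="example"]') # *= : substring assert a.css.match('[hreflang|="en"]') # |= : exact, or exact plus hyphen 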
 - def test_attribute_contains(self): - self.assert_select_multiple( - # From test_attribute_startswith - ('[rel*="style"]', ['l1']), - ('link[rel*="style"]', ['l1']), - ('notlink[rel*="notstyle"]', []), - ('[rel*="notstyle"]', []), - ('link[rel*="notstyle"]', []), - ('link[href*="bla"]', ['l1']), - ('[href*="http://"]', ['bob', 'me']), - ('[id*="p"]', ['pmulti', 'p1']), - ('div[id*="m"]', ['main']), - ('a[id*="m"]', ['me']), - # From test_attribute_endswith - ('[href*=".css"]', ['l1']), - ('link[href*=".css"]', ['l1']), - ('link[id*="1"]', ['l1']), - ('[id*="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1', 'dash1']), - ('div[id*="1"]', ['data1']), - ('[id*="noending"]', []), - # New for this test - ('[href*="."]', ['bob', 'me', 'l1']), - ('a[href*="."]', ['bob', 'me']), - ('link[href*="."]', ['l1']), - ('div[id*="n"]', ['main', 'inner']), - ('div[id*="nn"]', ['inner']), - ('div[data-tag*="edval"]', ['data1']) - ) - - def test_attribute_exact_or_hypen(self): - self.assert_select_multiple( - ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']), - ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']), - ('p[lang|="fr"]', ['lang-fr']), - ('p[lang|="gb"]', []), - ) - - def test_attribute_exists(self): - self.assert_select_multiple( - ('[rel]', ['l1', 'bob', 'me']), - ('link[rel]', ['l1']), - ('a[rel]', ['bob', 'me']), - ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']), - ('p[class]', ['p1', 'pmulti']), - ('[blah]', []), - ('p[blah]', []), - ('div[data-tag]', ['data1']) - ) - - def test_quoted_space_in_selector_name(self): - html = """
nope
-
yes
- """ - soup = BeautifulSoup(html, 'html.parser') - [chosen] = soup.select('div[style="display: right"]') - assert "yes" == chosen.string - - def test_unsupported_pseudoclass(self): - with pytest.raises(NotImplementedError): - self.soup.select("a:no-such-pseudoclass") - - with pytest.raises(SelectorSyntaxError): - self.soup.select("a:nth-of-type(a)") - - def test_nth_of_type(self): - # Try to select first paragraph - els = self.soup.select('div#inner p:nth-of-type(1)') - assert len(els) == 1 - assert els[0].string == 'Some text' - - # Try to select third paragraph - els = self.soup.select('div#inner p:nth-of-type(3)') - assert len(els) == 1 - assert els[0].string == 'Another' - - # Try to select (non-existent!) fourth paragraph - els = self.soup.select('div#inner p:nth-of-type(4)') - assert len(els) == 0 - - # Zero will select no tags. - els = self.soup.select('div p:nth-of-type(0)') - assert len(els) == 0 - - def test_nth_of_type_direct_descendant(self): - els = self.soup.select('div#inner > p:nth-of-type(1)') - assert len(els) == 1 - assert els[0].string == 'Some text' - - def test_id_child_selector_nth_of_type(self): - self.assert_selects('#inner > p:nth-of-type(2)', ['p1']) - - def test_select_on_element(self): - # Other tests operate on the tree; this operates on an element - # within the tree. - inner = self.soup.find("div", id="main") - selected = inner.select("div") - # The
tag was selected. The 