From f2a664953c353527a7e13ab871aafb7e6316a1d3 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Wed, 26 Jan 2022 21:33:27 -0800 Subject: [PATCH 001/743] Assume IPv6 is public address * Fixes #1629 --- plexpy/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plexpy/helpers.py b/plexpy/helpers.py index cd4bf921..6cd28a36 100644 --- a/plexpy/helpers.py +++ b/plexpy/helpers.py @@ -705,7 +705,7 @@ def sanitize(obj): def is_public_ip(host): ip = is_valid_ip(get_ip(host)) - if ip and ip.iptype() == 'PUBLIC': + if ip and (ip.iptype() == 'PUBLIC' or ip.version() == 6): return True return False From 2145f1e2728c20b31cb3587e2ae810f86ab389bc Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Wed, 26 Jan 2022 22:26:25 -0800 Subject: [PATCH 002/743] Check for local IPv6 addresses --- plexpy/helpers.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/plexpy/helpers.py b/plexpy/helpers.py index 6cd28a36..fb4afb80 100644 --- a/plexpy/helpers.py +++ b/plexpy/helpers.py @@ -705,7 +705,11 @@ def sanitize(obj): def is_public_ip(host): ip = is_valid_ip(get_ip(host)) - if ip and (ip.iptype() == 'PUBLIC' or ip.version() == 6): + ip_version = ip.version() + ip_type = ip.iptype() + if ip and ip_type != 'LOOPBACK' and ( + ip_version == 4 and ip_type == 'PUBLIC' or + ip_version == 6 and 'LOCAL' not in ip_type): return True return False From 96025c839d7b67d831cca15f0bb2dfc0ca6aa0e3 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 27 Jan 2022 10:52:07 -0800 Subject: [PATCH 003/743] Separate out plex.direct IP address log filter --- plexpy/logger.py | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/plexpy/logger.py b/plexpy/logger.py index e09ad52d..53f18662 100644 --- a/plexpy/logger.py +++ b/plexpy/logger.py @@ -191,15 +191,31 @@ class PublicIPFilter(RegexFilter): Log filter for public IP addresses """ REGEX = re.compile( - r'(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)[.-]){3}' + r'(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)[.]){3}' r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)' r'(?!\d*-[a-z0-9]{6})' ) + def replace(self, text, ip): + if helpers.is_public_ip(ip): + return text.replace(ip, '.'.join(['***'] * 4)) + return text + + +class PlexDirectIPFilter(RegexFilter): + """ + Log filter for IP addresses in plex.direct URL + """ + REGEX = re.compile( + r'(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)[-]){3}' + r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)' + r'(?!\d*-[a-z0-9]{6})' + r'(?=\.[a-z0-9]+\.plex\.direct)' + ) + def replace(self, text, ip): if helpers.is_public_ip(ip.replace('-', '.')): - partition = '-' if '-' in ip else '.' 
- return text.replace(ip, partition.join(['***'] * 4)) + return text.replace(ip, '-'.join(['***'] * 4)) return text @@ -321,6 +337,7 @@ def initLogger(console=False, log_dir=False, verbose=False): handler.addFilter(UsernameFilter()) handler.addFilter(BlacklistFilter()) handler.addFilter(PublicIPFilter()) + handler.addFilter(PlexDirectIPFilter()) handler.addFilter(EmailFilter()) handler.addFilter(PlexTokenFilter()) From b9f04ac71ae8a67e1fc37534cc8d1cf85d8af38b Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 30 Jan 2022 16:50:23 -0800 Subject: [PATCH 004/743] Default Telegram to MarkdownV2 when HTML disabled --- plexpy/notifiers.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index aaf74ddf..cff735cf 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -3869,7 +3869,7 @@ class TELEGRAM(Notifier): 'chat_id': '', 'disable_web_preview': 0, 'silent_notification': 0, - 'html_support': 1, + 'html_support': 0, 'incl_subject': 1, 'incl_poster': 0 } @@ -3884,6 +3884,8 @@ class TELEGRAM(Notifier): if self.config['html_support']: data['parse_mode'] = 'HTML' + else: + data['parse_mode'] = 'MarkdownV2' if self.config['incl_poster'] and kwargs.get('parameters'): # Grab formatted metadata @@ -3963,7 +3965,7 @@ class TELEGRAM(Notifier): {'label': 'Enable HTML Support', 'value': self.config['html_support'], 'name': 'telegram_html_support', - 'description': 'Style your messages using these HTML tags: b, i, a[href], code, pre.', + 'description': 'Enable to style your messages using HTML. Disable to use Markdown instead.', 'input_type': 'checkbox' }, {'label': 'Disable Web Page Previews', From fe388e903d05e9d8aeee4db6d6b9b2ac99f2cf55 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 30 Jan 2022 17:26:08 -0800 Subject: [PATCH 005/743] Fix export download not including images --- plexpy/exporter.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/plexpy/exporter.py b/plexpy/exporter.py index 1feb8a04..196b15f0 100644 --- a/plexpy/exporter.py +++ b/plexpy/exporter.py @@ -1928,14 +1928,13 @@ class Export(object): self.file_size += os.path.getsize(filepath) def _exported_images(self, title): - images_dirpath = get_export_dirpath(self.directory, images_directory=title) + dirpath = get_export_dirpath(self.directory) - if os.path.exists(images_dirpath): - for f in os.listdir(images_dirpath): - if f.endswith('.thumb.jpg'): - self.exported_thumb = True - elif f.endswith('.art.jpg'): - self.exported_art = True + for root, dirs, files in os.walk(dirpath): + if any(f.endswith('.thumb.jpg') for f in files): + self.exported_thumb = True + elif any(f.endswith('.art.jpg') for f in files): + self.exported_art = True def _media_type(self, obj): return 'photoalbum' if self.is_photoalbum(obj) else obj.type From 636d2fe43e370667afa47793067180d50dc699a8 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 31 Jan 2022 15:04:41 -0800 Subject: [PATCH 006/743] Add disc number notification parameter --- plexpy/common.py | 2 ++ plexpy/notification_handler.py | 7 +++++++ 2 files changed, 9 insertions(+) diff --git a/plexpy/common.py b/plexpy/common.py index 4d59f713..1312d98f 100644 --- a/plexpy/common.py +++ b/plexpy/common.py @@ -502,6 +502,8 @@ NOTIFICATION_PARAMETERS = [ {'name': 'Season Number 00', 'type': 'int', 'value': 'season_num00', 'description': 'The two digit season 
number.', 'example': 'e.g. 01, or 01-03'}, {'name': 'Episode Number', 'type': 'int', 'value': 'episode_num', 'description': 'The episode number.', 'example': 'e.g. 6, or 6-10'}, {'name': 'Episode Number 00', 'type': 'int', 'value': 'episode_num00', 'description': 'The two digit episode number.', 'example': 'e.g. 06, or 06-10'}, + {'name': 'Disc Number', 'type': 'int', 'value': 'disc_num', 'description': 'The disc number.', 'example': 'e.g. 2'}, + {'name': 'Disc Number 00', 'type': 'int', 'value': 'disc_num00', 'description': 'The two digit disc number.', 'example': 'e.g. 02'}, {'name': 'Track Number', 'type': 'int', 'value': 'track_num', 'description': 'The track number.', 'example': 'e.g. 4, or 4-10'}, {'name': 'Track Number 00', 'type': 'int', 'value': 'track_num00', 'description': 'The two digit track number.', 'example': 'e.g. 04, or 04-10'}, {'name': 'Season Count', 'type': 'int', 'value': 'season_count', 'description': 'The number of seasons in a grouped recently added notification.'}, diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index adf3d103..9a662e2c 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -841,6 +841,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m season_num, season_num00 = num, num00 episode_num, episode_num00 = '', '' + disc_num, disc_num00 = '', '' track_num, track_num00 = '', '' child_count = len(child_num) @@ -864,6 +865,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m episode_num, episode_num00 = num, num00 track_num, track_num00 = num, num00 + disc_num, disc_num00 = '', '' + child_count = 1 grandchild_count = len(grandchild_num) @@ -878,6 +881,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m season_num00 = str(notify_params['parent_media_index']).zfill(2) episode_num = str(notify_params['media_index']).zfill(1) episode_num00 = str(notify_params['media_index']).zfill(2) + disc_num = str(notify_params['parent_media_index']).zfill(1) + disc_num00 = str(notify_params['parent_media_index']).zfill(2) track_num = str(notify_params['media_index']).zfill(1) track_num00 = str(notify_params['media_index']).zfill(2) child_count = 1 @@ -1046,6 +1051,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m 'season_num00': season_num00, 'episode_num': episode_num, 'episode_num00': episode_num00, + 'disc_num': disc_num, + 'disc_num00': disc_num00, 'track_num': track_num, 'track_num00': track_num00, 'season_count': child_count, From e33767de9b527e365125cb695a3a018b031e4e5a Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 1 Feb 2022 22:06:53 -0800 Subject: [PATCH 007/743] Fix updating section_id when updating metadata --- plexpy/datafactory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index daf57c56..3b337f75 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -1910,7 +1910,7 @@ class DataFactory(object): monitor_db = database.MonitorDatabase() query = 'UPDATE session_history SET section_id = ? WHERE rating_key = ?' 
- args = [metadata['section_id'], old_rating_key] + args = [metadata['section_id'], new_rating_key] monitor_db.action(query=query, args=args) # Update the session_history_metadata table From cba746920aa7c16193daeb4bfc2cc44680bc7bce Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 3 Feb 2022 12:22:14 -0800 Subject: [PATCH 008/743] Add Telegram channel ID to config label and description --- plexpy/notifiers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index cff735cf..5eae6b55 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -3941,10 +3941,10 @@ class TELEGRAM(Notifier): ' on Telegram to get one.', 'input_type': 'token' }, - {'label': 'Telegram Chat ID, Group ID, or Channel Username', + {'label': 'Telegram Chat ID, Group ID, or Channel ID/Username', 'value': self.config['chat_id'], 'name': 'telegram_chat_id', - 'description': 'Your Telegram Chat ID, Group ID, or @channelusername. ' + 'description': 'Your Telegram Chat ID, Group ID, Channel ID or @channelusername. ' 'Contact @myidbot' ' on Telegram to get an ID.', From c3d09d34f828679c4d937aed074e9db1c7085548 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 6 Feb 2022 23:18:53 -0800 Subject: [PATCH 009/743] Prevent rating_key collisions when updating metadata --- data/interfaces/default/update_metadata.html | 5 +- plexpy/datafactory.py | 132 ++++++++++++++++--- plexpy/webserve.py | 7 +- 3 files changed, 120 insertions(+), 24 deletions(-) diff --git a/data/interfaces/default/update_metadata.html b/data/interfaces/default/update_metadata.html index fe962e1b..59b35fd0 100644 --- a/data/interfaces/default/update_metadata.html +++ b/data/interfaces/default/update_metadata.html @@ -233,8 +233,9 @@ DOCUMENTATION :: END data: { old_rating_key: '${query["rating_key"]}', new_rating_key: new_rating_key, - media_type: '${query["media_type"]}' - }, + media_type: '${query["media_type"]}', + single_update: true + }, async: true, success: function (data) { window.location.href = new_href; diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index 3b337f75..7d906b9d 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -44,6 +44,13 @@ else: from plexpy import pmsconnect from plexpy import session +# Temporarily store update_metadata row ids in memory to prevent rating_key collisions +_UPDATE_METADATA_IDS = { + 'grandparent_rating_key_ids': set(), + 'parent_rating_key_ids': set(), + 'rating_key_ids': set() +} + class DataFactory(object): """ @@ -1831,7 +1838,7 @@ class DataFactory(object): return key_list - def update_metadata(self, old_key_list='', new_key_list='', media_type=''): + def update_metadata(self, old_key_list='', new_key_list='', media_type='', single_update=False): pms_connect = pmsconnect.PmsConnect() monitor_db = database.MonitorDatabase() @@ -1853,6 +1860,15 @@ class DataFactory(object): if mapping: logger.info("Tautulli DataFactory :: Updating metadata in the database.") + + global _UPDATE_METADATA_IDS + if single_update: + _UPDATE_METADATA_IDS = { + 'grandparent_rating_key_ids': set(), + 'parent_rating_key_ids': set(), + 'rating_key_ids': set() + } + for old_key, new_key in mapping.items(): metadata = pms_connect.get_metadata_details(new_key) @@ -1862,31 +1878,107 @@ class DataFactory(object): if metadata['media_type'] == 'show' or metadata['media_type'] == 'artist': # check grandparent_rating_key (2 tables) - monitor_db.action('UPDATE 
session_history SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?', - [new_key, old_key]) - monitor_db.action('UPDATE session_history_metadata SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?', - [new_key, old_key]) + query = ( + 'SELECT id FROM session_history ' + 'WHERE grandparent_rating_key = ? ' + ) + args = [old_key] + + if _UPDATE_METADATA_IDS['grandparent_rating_key_ids']: + query += ( + 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['grandparent_rating_key_ids']) + ) + + ids = [str(row['id']) for row in monitor_db.select(query, args)] + if ids: + _UPDATE_METADATA_IDS['grandparent_rating_key_ids'].update(ids) + else: + continue + + monitor_db.action( + 'UPDATE session_history SET grandparent_rating_key = ? ' + 'WHERE id IN (%s)' % ','.join(ids), + [new_key] + ) + monitor_db.action( + 'UPDATE session_history_metadata SET grandparent_rating_key = ? ' + 'WHERE id IN (%s)' % ','.join(ids), + [new_key] + ) + elif metadata['media_type'] == 'season' or metadata['media_type'] == 'album': # check parent_rating_key (2 tables) - monitor_db.action('UPDATE session_history SET parent_rating_key = ? WHERE parent_rating_key = ?', - [new_key, old_key]) - monitor_db.action('UPDATE session_history_metadata SET parent_rating_key = ? WHERE parent_rating_key = ?', - [new_key, old_key]) + query = ( + 'SELECT id FROM session_history ' + 'WHERE parent_rating_key = ? ' + ) + args = [old_key] + + if _UPDATE_METADATA_IDS['parent_rating_key_ids']: + query += ( + 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['parent_rating_key_ids']) + ) + + ids = [str(row['id']) for row in monitor_db.select(query, args)] + if ids: + _UPDATE_METADATA_IDS['parent_rating_key_ids'].update(ids) + else: + continue + + monitor_db.action( + 'UPDATE session_history SET parent_rating_key = ? ' + 'WHERE id IN (%s)' % ','.join(ids), + [new_key] + ) + monitor_db.action( + 'UPDATE session_history_metadata SET parent_rating_key = ? ' + 'WHERE id IN (%s)' % ','.join(ids), + [new_key] + ) + else: # check rating_key (2 tables) - monitor_db.action('UPDATE session_history SET rating_key = ? WHERE rating_key = ?', - [new_key, old_key]) - monitor_db.action('UPDATE session_history_media_info SET rating_key = ? WHERE rating_key = ?', - [new_key, old_key]) + query = ( + 'SELECT id FROM session_history ' + 'WHERE rating_key = ? ' + ) + args = [old_key] + + if _UPDATE_METADATA_IDS['rating_key_ids']: + query += ( + 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['rating_key_ids']) + ) + + ids = [str(row['id']) for row in monitor_db.select(query, args)] + if ids: + _UPDATE_METADATA_IDS['rating_key_ids'].update(ids) + else: + continue + + monitor_db.action( + 'UPDATE session_history SET rating_key = ? ' + 'WHERE id IN (%s)' % ','.join(ids), + [new_key] + ) + monitor_db.action( + 'UPDATE session_history_media_info SET rating_key = ? ' + 'WHERE id IN (%s)' % ','.join(ids), + [new_key] + ) + monitor_db.action( + 'INSERT INTO update_metadata (rating_key_id) ' + 'VALUES %s' % ','.join(['(?)'] * len(ids)), + ids + ) # update session_history_metadata table - self.update_metadata_details(old_key, new_key, metadata) + self.update_metadata_details(old_key, new_key, metadata, ids) return 'Updated metadata in database.' else: return 'Unable to update metadata in database. No changes were made.' 
- def update_metadata_details(self, old_rating_key='', new_rating_key='', metadata=None): + def update_metadata_details(self, old_rating_key='', new_rating_key='', metadata=None, ids=None): if metadata: # Create full_title @@ -1909,8 +2001,9 @@ class DataFactory(object): monitor_db = database.MonitorDatabase() - query = 'UPDATE session_history SET section_id = ? WHERE rating_key = ?' - args = [metadata['section_id'], new_rating_key] + query = 'UPDATE session_history SET section_id = ? ' \ + 'WHERE id IN (%s)' % ','.join(ids) + args = [metadata['section_id']] monitor_db.action(query=query, args=args) # Update the session_history_metadata table @@ -1922,7 +2015,7 @@ class DataFactory(object): 'added_at = ?, updated_at = ?, last_viewed_at = ?, content_rating = ?, summary = ?, ' \ 'tagline = ?, rating = ?, duration = ?, guid = ?, directors = ?, writers = ?, actors = ?, ' \ 'genres = ?, studio = ?, labels = ? ' \ - 'WHERE rating_key = ?' + 'WHERE id IN (%s)' % ','.join(ids) args = [metadata['rating_key'], metadata['parent_rating_key'], metadata['grandparent_rating_key'], metadata['title'], metadata['parent_title'], metadata['grandparent_title'], @@ -1932,8 +2025,7 @@ class DataFactory(object): metadata['year'], metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'], metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['tagline'], metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, - metadata['studio'], labels, - old_rating_key] + metadata['studio'], labels] monitor_db.action(query=query, args=args) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 7a30cfb4..dd6e66a4 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -5145,7 +5145,7 @@ class WebInterface(object): @cherrypy.tools.json_out() @requireAuth(member_of("admin")) @addtoapi() - def update_metadata_details(self, old_rating_key, new_rating_key, media_type, **kwargs): + def update_metadata_details(self, old_rating_key, new_rating_key, media_type, single_update=False, **kwargs): """ Update the metadata in the Tautulli database by matching rating keys. Also updates all parents or children of the media item if it is a show/season/episode or artist/album/track. 
@@ -5163,6 +5163,8 @@ class WebInterface(object): None ``` """ + single_update = helpers.bool_true(single_update) + if new_rating_key: data_factory = datafactory.DataFactory() pms_connect = pmsconnect.PmsConnect() @@ -5172,7 +5174,8 @@ class WebInterface(object): result = data_factory.update_metadata(old_key_list=old_key_list, new_key_list=new_key_list, - media_type=media_type) + media_type=media_type, + single_update=single_update) if result: return {'message': result} From 8e6f8d7735271d48995674b6b7ed0fb8f0b53957 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 7 Feb 2022 09:12:07 -0800 Subject: [PATCH 010/743] Remove unnecessary insert statement --- plexpy/datafactory.py | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index 7d906b9d..82b8a193 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -1885,9 +1885,7 @@ class DataFactory(object): args = [old_key] if _UPDATE_METADATA_IDS['grandparent_rating_key_ids']: - query += ( - 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['grandparent_rating_key_ids']) - ) + query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['grandparent_rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -1915,9 +1913,7 @@ class DataFactory(object): args = [old_key] if _UPDATE_METADATA_IDS['parent_rating_key_ids']: - query += ( - 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['parent_rating_key_ids']) - ) + query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['parent_rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -1945,9 +1941,7 @@ class DataFactory(object): args = [old_key] if _UPDATE_METADATA_IDS['rating_key_ids']: - query += ( - 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['rating_key_ids']) - ) + query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -1965,11 +1959,6 @@ class DataFactory(object): 'WHERE id IN (%s)' % ','.join(ids), [new_key] ) - monitor_db.action( - 'INSERT INTO update_metadata (rating_key_id) ' - 'VALUES %s' % ','.join(['(?)'] * len(ids)), - ids - ) # update session_history_metadata table self.update_metadata_details(old_key, new_key, metadata, ids) From 2d9e109f97177d7794481f691a8e6d5c00d85dd9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 21:57:11 -0800 Subject: [PATCH 011/743] Bump arrow from 1.2.1 to 1.2.2 (#1630) * Bump arrow from 1.2.1 to 1.2.2 Bumps [arrow](https://github.com/arrow-py/arrow) from 1.2.1 to 1.2.2. - [Release notes](https://github.com/arrow-py/arrow/releases) - [Changelog](https://github.com/arrow-py/arrow/blob/master/CHANGELOG.rst) - [Commits](https://github.com/arrow-py/arrow/compare/1.2.1...1.2.2) --- updated-dependencies: - dependency-name: arrow dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Update arrow==1.2.2 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/arrow/_version.py | 2 +- lib/arrow/arrow.py | 4 +- lib/arrow/constants.py | 19 + lib/arrow/locales.py | 159 ++- lib/typing_extensions.py | 2009 ++++++++++++++------------------------ requirements.txt | 2 +- 6 files changed, 891 insertions(+), 1304 deletions(-) diff --git a/lib/arrow/_version.py b/lib/arrow/_version.py index a955fdae..bc86c944 100644 --- a/lib/arrow/_version.py +++ b/lib/arrow/_version.py @@ -1 +1 @@ -__version__ = "1.2.1" +__version__ = "1.2.2" diff --git a/lib/arrow/arrow.py b/lib/arrow/arrow.py index fef66c10..21b0347f 100644 --- a/lib/arrow/arrow.py +++ b/lib/arrow/arrow.py @@ -759,7 +759,7 @@ class Arrow: >>> start = datetime(2013, 5, 5, 12, 30) >>> end = datetime(2013, 5, 5, 17, 15) >>> for r in arrow.Arrow.interval('hour', start, end, 2): - ... print r + ... print(r) ... (, ) (, ) @@ -1384,7 +1384,7 @@ class Arrow: search_string = search_string.format(r"\d+") # Create search pattern and find within string - pattern = re.compile(fr"{search_string}") + pattern = re.compile(fr"(^|\b|\d){search_string}") match = pattern.search(input_string) # If there is no match continue to next iteration diff --git a/lib/arrow/constants.py b/lib/arrow/constants.py index e41ffa5d..1189d07c 100644 --- a/lib/arrow/constants.py +++ b/lib/arrow/constants.py @@ -73,7 +73,20 @@ DEHUMANIZE_LOCALES = { "zh-hk", "nl", "nl-nl", + "be", + "be-by", + "pl", + "pl-pl", + "ru", + "ru-ru", "af", + "bg", + "bg-bg", + "ua", + "uk", + "uk-ua", + "mk", + "mk-mk", "de", "de-de", "de-ch", @@ -97,6 +110,10 @@ DEHUMANIZE_LOCALES = { "da-dk", "ml", "hi", + "cs", + "cs-cz", + "sk", + "sk-sk", "fa", "fa-ir", "mr", @@ -145,4 +162,6 @@ DEHUMANIZE_LOCALES = { "ta-lk", "ur", "ur-pk", + "kk", + "kk-kz", } diff --git a/lib/arrow/locales.py b/lib/arrow/locales.py index d6d5c486..b7fb0d32 100644 --- a/lib/arrow/locales.py +++ b/lib/arrow/locales.py @@ -46,7 +46,7 @@ _TimeFrameElements = Union[ ] -_locale_map: Dict[str, Type["Locale"]] = dict() +_locale_map: Dict[str, Type["Locale"]] = {} def get_locale(name: str) -> "Locale": @@ -172,7 +172,16 @@ class Locale: humanized = " ".join(parts) if not only_distance: - humanized = self._format_relative(humanized, *timeframes[-1]) + # Needed to determine the correct relative string to use + timeframe_value = 0 + + for _unit_name, unit_value in timeframes: + if trunc(unit_value) != 0: + timeframe_value = trunc(unit_value) + break + + # Note it doesn't matter the timeframe unit we use on the call, only the value + humanized = self._format_relative(humanized, "seconds", timeframe_value) return humanized @@ -951,7 +960,7 @@ class ChineseCNLocale(Locale): timeframes = { "now": "刚才", - "second": "一秒", + "second": "1秒", "seconds": "{0}秒", "minute": "1分钟", "minutes": "{0}分钟", @@ -959,7 +968,7 @@ class ChineseCNLocale(Locale): "hours": "{0}小时", "day": "1天", "days": "{0}天", - "week": "一周", + "week": "1周", "weeks": "{0}周", "month": "1个月", "months": "{0}个月", @@ -1487,8 +1496,12 @@ class RussianLocale(SlavicBaseLocale): timeframes: ClassVar[Mapping[TimeFrameLiteral, Union[str, Mapping[str, str]]]] = { "now": "сейчас", - "second": "Второй", - "seconds": "{0} несколько секунд", + "second": "секунда", + "seconds": { + "singular": "{0} секунду", + "dual": "{0} секунды", + "plural": "{0} секунд", + }, "minute": "минуту", "minutes": { "singular": "{0} минуту", @@ -1511,6 
+1524,12 @@ class RussianLocale(SlavicBaseLocale): "dual": "{0} месяца", "plural": "{0} месяцев", }, + "quarter": "квартал", + "quarters": { + "singular": "{0} квартал", + "dual": "{0} квартала", + "plural": "{0} кварталов", + }, "year": "год", "years": {"singular": "{0} год", "dual": "{0} года", "plural": "{0} лет"}, } @@ -2037,6 +2056,8 @@ class NorwegianLocale(Locale): "hours": "{0} timer", "day": "en dag", "days": "{0} dager", + "week": "en uke", + "weeks": "{0} uker", "month": "en måned", "months": "{0} måneder", "year": "ett år", @@ -2086,6 +2107,9 @@ class NorwegianLocale(Locale): ] day_abbreviations = ["", "ma", "ti", "on", "to", "fr", "lø", "sø"] + def _ordinal_number(self, n: int) -> str: + return f"{n}." + class NewNorwegianLocale(Locale): @@ -2104,7 +2128,9 @@ class NewNorwegianLocale(Locale): "hours": "{0} timar", "day": "ein dag", "days": "{0} dagar", - "month": "en månad", + "week": "ei veke", + "weeks": "{0} veker", + "month": "ein månad", "months": "{0} månader", "year": "eitt år", "years": "{0} år", @@ -2153,6 +2179,9 @@ class NewNorwegianLocale(Locale): ] day_abbreviations = ["", "må", "ty", "on", "to", "fr", "la", "su"] + def _ordinal_number(self, n: int) -> str: + return f"{n}." + class PortugueseLocale(Locale): names = ["pt", "pt-pt"] @@ -3048,44 +3077,51 @@ class CzechLocale(Locale): timeframes: ClassVar[Mapping[TimeFrameLiteral, Union[str, Mapping[str, str]]]] = { "now": "Teď", - "second": {"past": "vteřina", "future": "vteřina", "zero": "vteřina"}, + "second": {"past": "vteřina", "future": "vteřina"}, "seconds": { + "zero": "vteřina", "past": "{0} sekundami", "future-singular": "{0} sekundy", "future-paucal": "{0} sekund", }, - "minute": {"past": "minutou", "future": "minutu", "zero": "{0} minut"}, + "minute": {"past": "minutou", "future": "minutu"}, "minutes": { + "zero": "{0} minut", "past": "{0} minutami", "future-singular": "{0} minuty", "future-paucal": "{0} minut", }, - "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodin"}, + "hour": {"past": "hodinou", "future": "hodinu"}, "hours": { + "zero": "{0} hodin", "past": "{0} hodinami", "future-singular": "{0} hodiny", "future-paucal": "{0} hodin", }, - "day": {"past": "dnem", "future": "den", "zero": "{0} dnů"}, + "day": {"past": "dnem", "future": "den"}, "days": { + "zero": "{0} dnů", "past": "{0} dny", "future-singular": "{0} dny", "future-paucal": "{0} dnů", }, - "week": {"past": "týdnem", "future": "týden", "zero": "{0} týdnů"}, + "week": {"past": "týdnem", "future": "týden"}, "weeks": { + "zero": "{0} týdnů", "past": "{0} týdny", "future-singular": "{0} týdny", "future-paucal": "{0} týdnů", }, - "month": {"past": "měsícem", "future": "měsíc", "zero": "{0} měsíců"}, + "month": {"past": "měsícem", "future": "měsíc"}, "months": { + "zero": "{0} měsíců", "past": "{0} měsíci", "future-singular": "{0} měsíce", "future-paucal": "{0} měsíců", }, - "year": {"past": "rokem", "future": "rok", "zero": "{0} let"}, + "year": {"past": "rokem", "future": "rok"}, "years": { + "zero": "{0} let", "past": "{0} lety", "future-singular": "{0} roky", "future-paucal": "{0} let", @@ -3171,44 +3207,51 @@ class SlovakLocale(Locale): timeframes: ClassVar[Mapping[TimeFrameLiteral, Union[str, Mapping[str, str]]]] = { "now": "Teraz", - "second": {"past": "sekundou", "future": "sekundu", "zero": "{0} sekúnd"}, + "second": {"past": "sekundou", "future": "sekundu"}, "seconds": { + "zero": "{0} sekúnd", "past": "{0} sekundami", "future-singular": "{0} sekundy", "future-paucal": "{0} sekúnd", }, - "minute": {"past": "minútou", 
"future": "minútu", "zero": "{0} minút"}, + "minute": {"past": "minútou", "future": "minútu"}, "minutes": { + "zero": "{0} minút", "past": "{0} minútami", "future-singular": "{0} minúty", "future-paucal": "{0} minút", }, - "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodín"}, + "hour": {"past": "hodinou", "future": "hodinu"}, "hours": { + "zero": "{0} hodín", "past": "{0} hodinami", "future-singular": "{0} hodiny", "future-paucal": "{0} hodín", }, - "day": {"past": "dňom", "future": "deň", "zero": "{0} dní"}, + "day": {"past": "dňom", "future": "deň"}, "days": { + "zero": "{0} dní", "past": "{0} dňami", "future-singular": "{0} dni", "future-paucal": "{0} dní", }, - "week": {"past": "týždňom", "future": "týždeň", "zero": "{0} týždňov"}, + "week": {"past": "týždňom", "future": "týždeň"}, "weeks": { + "zero": "{0} týždňov", "past": "{0} týždňami", "future-singular": "{0} týždne", "future-paucal": "{0} týždňov", }, - "month": {"past": "mesiacom", "future": "mesiac", "zero": "{0} mesiacov"}, + "month": {"past": "mesiacom", "future": "mesiac"}, "months": { + "zero": "{0} mesiacov", "past": "{0} mesiacmi", "future-singular": "{0} mesiace", "future-paucal": "{0} mesiacov", }, - "year": {"past": "rokom", "future": "rok", "zero": "{0} rokov"}, + "year": {"past": "rokom", "future": "rok"}, "years": { + "zero": "{0} rokov", "past": "{0} rokmi", "future-singular": "{0} roky", "future-paucal": "{0} rokov", @@ -4229,8 +4272,12 @@ class IndonesianLocale(Locale): "hours": "{0} jam", "day": "1 hari", "days": "{0} hari", + "week": "1 minggu", + "weeks": "{0} minggu", "month": "1 bulan", "months": "{0} bulan", + "quarter": "1 kuartal", + "quarters": "{0} kuartal", "year": "1 tahun", "years": "{0} tahun", } @@ -5706,7 +5753,7 @@ class SinhalaLocale(Locale): } # Sinhala: the general format to describe timeframe is different from past and future, # so we do not copy the original timeframes dictionary - timeframes_only_distance = dict() + timeframes_only_distance = {} timeframes_only_distance["second"] = "තත්පරයක්" timeframes_only_distance["seconds"] = "තත්පර {0}" timeframes_only_distance["minute"] = "මිනිත්තුවක්" @@ -5895,3 +5942,71 @@ class UrduLocale(Locale): "ہفتہ", "اتوار", ] + + +class KazakhLocale(Locale): + + names = ["kk", "kk-kz"] + + past = "{0} бұрын" + future = "{0} кейін" + timeframes = { + "now": "қазір", + "second": "бір секунд", + "seconds": "{0} секунд", + "minute": "бір минут", + "minutes": "{0} минут", + "hour": "бір сағат", + "hours": "{0} сағат", + "day": "бір күн", + "days": "{0} күн", + "week": "бір апта", + "weeks": "{0} апта", + "month": "бір ай", + "months": "{0} ай", + "year": "бір жыл", + "years": "{0} жыл", + } + + month_names = [ + "", + "Қаңтар", + "Ақпан", + "Наурыз", + "Сәуір", + "Мамыр", + "Маусым", + "Шілде", + "Тамыз", + "Қыркүйек", + "Қазан", + "Қараша", + "Желтоқсан", + ] + month_abbreviations = [ + "", + "Қан", + "Ақп", + "Нау", + "Сәу", + "Мам", + "Мау", + "Шіл", + "Там", + "Қыр", + "Қаз", + "Қар", + "Жел", + ] + + day_names = [ + "", + "Дүйсембі", + "Сейсенбі", + "Сәрсенбі", + "Бейсенбі", + "Жұма", + "Сенбі", + "Жексенбі", + ] + day_abbreviations = ["", "Дс", "Сс", "Ср", "Бс", "Жм", "Сб", "Жс"] diff --git a/lib/typing_extensions.py b/lib/typing_extensions.py index 95bb8735..9f1c7aa3 100644 --- a/lib/typing_extensions.py +++ b/lib/typing_extensions.py @@ -1,50 +1,20 @@ import abc import collections -import contextlib +import collections.abc +import operator import sys import typing -import collections.abc as collections_abc -import operator - -# These are 
used by Protocol implementation -# We use internal typing helpers here, but this significantly reduces -# code duplication. (Also this is only until Protocol is in typing.) -from typing import Generic, Callable, TypeVar, Tuple # After PEP 560, internal typing API was substantially reworked. # This is especially important for Protocol class which uses internal APIs -# quite extensivelly. +# quite extensively. PEP_560 = sys.version_info[:3] >= (3, 7, 0) if PEP_560: - GenericMeta = TypingMeta = type - from typing import _GenericAlias + GenericMeta = type else: - from typing import GenericMeta, TypingMeta -OLD_GENERICS = False -try: - from typing import _type_vars, _next_in_mro, _type_check -except ImportError: - OLD_GENERICS = True -try: - from typing import _subs_tree # noqa - SUBS_TREE = True -except ImportError: - SUBS_TREE = False -try: - from typing import _tp_cache -except ImportError: - def _tp_cache(x): - return x -try: - from typing import _TypingEllipsis, _TypingEmpty -except ImportError: - class _TypingEllipsis: - pass - - class _TypingEmpty: - pass - + # 3.6 + from typing import GenericMeta, _type_vars # noqa # The two functions below are copies of typing internal helpers. # They are needed by _ProtocolMeta @@ -60,56 +30,12 @@ def _no_slots_copy(dct): def _check_generic(cls, parameters): if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) + raise TypeError(f"{cls} is not a generic class") alen = len(parameters) elen = len(cls.__parameters__) if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -if hasattr(typing, '_generic_new'): - _generic_new = typing._generic_new -else: - # Note: The '_generic_new(...)' function is used as a part of the - # process of creating a generic type and was added to the typing module - # as of Python 3.5.3. - # - # We've defined '_generic_new(...)' below to exactly match the behavior - # implemented in older versions of 'typing' bundled with Python 3.5.0 to - # 3.5.2. This helps eliminate redundancy when defining collection types - # like 'Deque' later. - # - # See https://github.com/python/typing/pull/308 for more details -- in - # particular, compare and contrast the definition of types like - # 'typing.List' before and after the merge. - - def _generic_new(base_cls, cls, *args, **kwargs): - return base_cls.__new__(cls, *args, **kwargs) - -# See https://github.com/python/typing/pull/439 -if hasattr(typing, '_geqv'): - from typing import _geqv - _geqv_defined = True -else: - _geqv = None - _geqv_defined = False - -if sys.version_info[:2] >= (3, 6): - import _collections_abc - _check_methods_in_mro = _collections_abc._check_methods -else: - def _check_methods_in_mro(C, *methods): - mro = C.__mro__ - for method in methods: - for B in mro: - if method in B.__dict__: - if B.__dict__[method] is None: - return NotImplemented - break - else: - return NotImplemented - return True + raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments for {cls};" + f" actual {alen}, expected {elen}") # Please keep __all__ alphabetized within each category. @@ -119,18 +45,17 @@ __all__ = [ 'Concatenate', 'Final', 'ParamSpec', + 'Self', 'Type', # ABCs (from collections.abc). 
- # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # 'Awaitable', - # 'AsyncIterator', - # 'AsyncIterable', - # 'Coroutine', - # 'AsyncGenerator', - # 'AsyncContextManager', - # 'ChainMap', + 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'ChainMap', # Concrete collection types. 'ContextManager', @@ -144,38 +69,29 @@ __all__ = [ 'SupportsIndex', # One-off things. + 'Annotated', 'final', 'IntVar', 'Literal', 'NewType', 'overload', + 'Protocol', + 'runtime', + 'runtime_checkable', 'Text', 'TypeAlias', 'TypeGuard', 'TYPE_CHECKING', ] -# Annotated relies on substitution trees of pep 560. It will not work for -# versions of typing older than 3.5.3 -HAVE_ANNOTATED = PEP_560 or SUBS_TREE - if PEP_560: __all__.extend(["get_args", "get_origin", "get_type_hints"]) -if HAVE_ANNOTATED: - __all__.append("Annotated") - -# Protocols are hard to backport to the original version of typing 3.5.0 -HAVE_PROTOCOLS = sys.version_info[:3] != (3, 5, 0) - -if HAVE_PROTOCOLS: - __all__.extend(['Protocol', 'runtime', 'runtime_checkable']) - - -# TODO +# 3.6.2+ if hasattr(typing, 'NoReturn'): NoReturn = typing.NoReturn -elif hasattr(typing, '_FinalTypingBase'): +# 3.6.0-3.6.1 +else: class _NoReturn(typing._FinalTypingBase, _root=True): """Special type indicating functions that never return. Example:: @@ -197,32 +113,6 @@ elif hasattr(typing, '_FinalTypingBase'): raise TypeError("NoReturn cannot be used with issubclass().") NoReturn = _NoReturn(_root=True) -else: - class _NoReturnMeta(typing.TypingMeta): - """Metaclass for NoReturn""" - def __new__(cls, name, bases, namespace, _root=False): - return super().__new__(cls, name, bases, namespace, _root=_root) - - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") - - class NoReturn(typing.Final, metaclass=_NoReturnMeta, _root=True): - """Special type indicating functions that never return. - Example:: - - from typing import NoReturn - - def stop() -> NoReturn: - raise Exception('no way') - - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - __slots__ = () - # Some unconstrained type variables. These are used by the container types. # (These are not for export.) @@ -230,142 +120,15 @@ T = typing.TypeVar('T') # Any type. KT = typing.TypeVar('KT') # Key type. VT = typing.TypeVar('VT') # Value type. T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = typing.TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = typing.TypeVar('VT_co', covariant=True) # Value type covariant containers. T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -if hasattr(typing, 'ClassVar'): - ClassVar = typing.ClassVar -elif hasattr(typing, '_FinalTypingBase'): - class _ClassVar(typing._FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. 
- - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - ClassVar = _ClassVar(_root=True) -else: - class _ClassVarMeta(typing.TypingMeta): - """Metaclass for ClassVar""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("ClassVar cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("ClassVar cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class ClassVar(typing.Final, metaclass=_ClassVarMeta, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __type__ = None +ClassVar = typing.ClassVar # On older versions of typing there is an internal class named "Final". 
+# 3.8+ if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): Final = typing.Final +# 3.7 elif sys.version_info[:2] >= (3, 7): class _FinalForm(typing._SpecialForm, _root=True): @@ -374,8 +137,8 @@ elif sys.version_info[:2] >= (3, 7): def __getitem__(self, parameters): item = typing._type_check(parameters, - '{} accepts only single type'.format(self._name)) - return _GenericAlias(self, (item,)) + f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) Final = _FinalForm('Final', doc="""A special typing construct to indicate that a name @@ -391,7 +154,8 @@ elif sys.version_info[:2] >= (3, 7): TIMEOUT = 1 # Error reported by type checker There is no runtime checking of these properties.""") -elif hasattr(typing, '_FinalTypingBase'): +# 3.6 +else: class _Final(typing._FinalTypingBase, _root=True): """A special typing construct to indicate that a name cannot be re-assigned or overridden in a subclass. @@ -417,10 +181,9 @@ elif hasattr(typing, '_FinalTypingBase'): cls = type(self) if self.__type__ is None: return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), + f'{cls.__name__[1:]} accepts only single type.'), _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) + raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') def _eval_type(self, globalns, localns): new_tp = typing._eval_type(self.__type__, globalns, localns) @@ -431,7 +194,7 @@ elif hasattr(typing, '_FinalTypingBase'): def __repr__(self): r = super().__repr__() if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) + r += f'[{typing._type_repr(self.__type__)}]' return r def __hash__(self): @@ -445,79 +208,12 @@ elif hasattr(typing, '_FinalTypingBase'): return self is other Final = _Final(_root=True) -else: - class _FinalMeta(typing.TypingMeta): - """Metaclass for Final""" - - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self - - def __instancecheck__(self, obj): - raise TypeError("Final cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Final cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, Final): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class Final(typing.Final, metaclass=_FinalMeta, _root=True): - """A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. 
- For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties. - """ - - __type__ = None +# 3.8+ if hasattr(typing, 'final'): final = typing.final +# 3.6-3.7 else: def final(f): """This decorator can be used to indicate to type checkers that @@ -543,11 +239,13 @@ else: def IntVar(name): - return TypeVar(name) + return typing.TypeVar(name) +# 3.8+: if hasattr(typing, 'Literal'): Literal = typing.Literal +# 3.7: elif sys.version_info[:2] >= (3, 7): class _LiteralForm(typing._SpecialForm, _root=True): @@ -555,7 +253,7 @@ elif sys.version_info[:2] >= (3, 7): return 'typing_extensions.' + self._name def __getitem__(self, parameters): - return _GenericAlias(self, parameters) + return typing._GenericAlias(self, parameters) Literal = _LiteralForm('Literal', doc="""A type that can be used to indicate to type checkers @@ -570,7 +268,8 @@ elif sys.version_info[:2] >= (3, 7): Literal[...] cannot be subclassed. There is no runtime checking verifying that the parameter is actually a value instead of a type.""") -elif hasattr(typing, '_FinalTypingBase'): +# 3.6: +else: class _Literal(typing._FinalTypingBase, _root=True): """A type that can be used to indicate to type checkers that the corresponding value has a value literally equivalent to the @@ -596,8 +295,7 @@ elif hasattr(typing, '_FinalTypingBase'): if not isinstance(values, tuple): values = (values,) return cls(values, _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) + raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') def _eval_type(self, globalns, localns): return self @@ -605,7 +303,7 @@ elif hasattr(typing, '_FinalTypingBase'): def __repr__(self): r = super().__repr__() if self.__values__ is not None: - r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__))) + r += f'[{", ".join(map(typing._type_repr, self.__values__))}]' return r def __hash__(self): @@ -619,161 +317,18 @@ elif hasattr(typing, '_FinalTypingBase'): return self is other Literal = _Literal(_root=True) -else: - class _LiteralMeta(typing.TypingMeta): - """Metaclass for Literal""" - - def __new__(cls, name, bases, namespace, values=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if values is not None: - self.__values__ = values - return self - - def __instancecheck__(self, obj): - raise TypeError("Literal cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Literal cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__values__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - if not isinstance(item, tuple): - item = (item,) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), values=item, _root=True) - - def _eval_type(self, globalns, localns): - return self - - def __repr__(self): - r = super().__repr__() - if self.__values__ is not None: - r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__))) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__values__)) - - def __eq__(self, other): - if not isinstance(other, Literal): - return NotImplemented - if self.__values__ is not None: - return self.__values__ == other.__values__ - return self is other - - class 
Literal(typing.Final, metaclass=_LiteralMeta, _root=True): - """A type that can be used to indicate to type checkers that the - corresponding value has a value literally equivalent to the - provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to the - value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime checking - verifying that the parameter is actually a value instead of a type. - """ - - __values__ = None -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy +_overload_dummy = typing._overload_dummy # noqa +overload = typing.overload # This is not a real generic class. Don't use outside annotations. -if hasattr(typing, 'Type'): - Type = typing.Type -else: - # Internal type variable used for Type[]. - CT_co = typing.TypeVar('CT_co', covariant=True, bound=type) - - class Type(typing.Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - __slots__ = () - +Type = typing.Type # Various ABCs mimicking those in collections.abc. # A few are simply re-exported for completeness. -def _define_guard(type_name): - """ - Returns True if the given type isn't defined in typing but - is defined in collections_abc. - - Adds the type to __all__ if the collection is found in either - typing or collection_abc. - """ - if hasattr(typing, type_name): - __all__.append(type_name) - globals()[type_name] = getattr(typing, type_name) - return False - elif hasattr(collections_abc, type_name): - __all__.append(type_name) - return True - else: - return False - class _ExtensionsGenericMeta(GenericMeta): def __subclasscheck__(self, subclass): @@ -782,12 +337,11 @@ class _ExtensionsGenericMeta(GenericMeta): between collections, typing, and typing_extensions on older versions of Python, see https://github.com/python/typing/issues/501. 
""" - if sys.version_info[:3] >= (3, 5, 3) or sys.version_info[:3] < (3, 5, 0): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False if not self.__extra__: return super().__subclasscheck__(subclass) res = self.__extra__.__subclasshook__(subclass) @@ -803,45 +357,15 @@ class _ExtensionsGenericMeta(GenericMeta): return False -if _define_guard('Awaitable'): - class Awaitable(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, - extra=collections_abc.Awaitable): - __slots__ = () - - -if _define_guard('Coroutine'): - class Coroutine(Awaitable[V_co], typing.Generic[T_co, T_contra, V_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.Coroutine): - __slots__ = () - - -if _define_guard('AsyncIterable'): - class AsyncIterable(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncIterable): - __slots__ = () - - -if _define_guard('AsyncIterator'): - class AsyncIterator(AsyncIterable[T_co], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncIterator): - __slots__ = () - +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator +# 3.6.1+ if hasattr(typing, 'Deque'): Deque = typing.Deque -elif _geqv_defined: - class Deque(collections.deque, typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Deque): - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) +# 3.6.0 else: class Deque(collections.deque, typing.MutableSequence[T], metaclass=_ExtensionsGenericMeta, @@ -851,114 +375,41 @@ else: def __new__(cls, *args, **kwds): if cls._gorg is Deque: return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) + return typing._generic_new(collections.deque, cls, *args, **kwds) - -if hasattr(typing, 'ContextManager'): - ContextManager = typing.ContextManager -elif hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=contextlib.AbstractContextManager): - __slots__ = () +ContextManager = typing.ContextManager +# 3.6.2+ +if hasattr(typing, 'AsyncContextManager'): + AsyncContextManager = typing.AsyncContextManager +# 3.6.0-3.6.1 else: - class ContextManager(typing.Generic[T_co]): + from _collections_abc import _check_methods as _check_methods_in_mro # noqa + + class AsyncContextManager(typing.Generic[T_co]): __slots__ = () - def __enter__(self): + async def __aenter__(self): return self @abc.abstractmethod - def __exit__(self, exc_type, exc_value, traceback): + async def __aexit__(self, exc_type, exc_value, traceback): return None @classmethod def __subclasshook__(cls, C): - if cls is ContextManager: - # In Python 3.6+, it is possible to set a method to None to - # explicitly indicate that the class does not implement an ABC - # (https://bugs.python.org/issue25958), but we do not support - # that pattern here because this fallback class is only used - # in Python 3.5 and earlier. 
- if (any("__enter__" in B.__dict__ for B in C.__mro__) and - any("__exit__" in B.__dict__ for B in C.__mro__)): - return True + if cls is AsyncContextManager: + return _check_methods_in_mro(C, "__aenter__", "__aexit__") return NotImplemented +DefaultDict = typing.DefaultDict -if hasattr(typing, 'AsyncContextManager'): - AsyncContextManager = typing.AsyncContextManager - __all__.append('AsyncContextManager') -elif hasattr(contextlib, 'AbstractAsyncContextManager'): - class AsyncContextManager(typing.Generic[T_co], - metaclass=_ExtensionsGenericMeta, - extra=contextlib.AbstractAsyncContextManager): - __slots__ = () - - __all__.append('AsyncContextManager') -elif sys.version_info[:2] >= (3, 5): - exec(""" -class AsyncContextManager(typing.Generic[T_co]): - __slots__ = () - - async def __aenter__(self): - return self - - @abc.abstractmethod - async def __aexit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncContextManager: - return _check_methods_in_mro(C, "__aenter__", "__aexit__") - return NotImplemented - -__all__.append('AsyncContextManager') -""") - - -if hasattr(typing, 'DefaultDict'): - DefaultDict = typing.DefaultDict -elif _geqv_defined: - class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) -else: - class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is DefaultDict: - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - - +# 3.7.2+ if hasattr(typing, 'OrderedDict'): OrderedDict = typing.OrderedDict +# 3.7.0-3.7.2 elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2): OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) -elif _geqv_defined: - class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, OrderedDict): - return collections.OrderedDict(*args, **kwds) - return _generic_new(collections.OrderedDict, cls, *args, **kwds) +# 3.6 else: class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], metaclass=_ExtensionsGenericMeta, @@ -969,44 +420,12 @@ else: def __new__(cls, *args, **kwds): if cls._gorg is OrderedDict: return collections.OrderedDict(*args, **kwds) - return _generic_new(collections.OrderedDict, cls, *args, **kwds) - + return typing._generic_new(collections.OrderedDict, cls, *args, **kwds) +# 3.6.2+ if hasattr(typing, 'Counter'): Counter = typing.Counter -elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1): - assert _geqv_defined - _TInt = typing.TypeVar('_TInt') - - class _CounterMeta(typing.GenericMeta): - """Metaclass for Counter""" - def __getitem__(self, item): - return super().__getitem__((item, int)) - - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_CounterMeta, - extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - 
-elif _geqv_defined: - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - +# 3.6.0-3.6.1 else: class Counter(collections.Counter, typing.Dict[T, int], @@ -1017,88 +436,36 @@ else: def __new__(cls, *args, **kwds): if cls._gorg is Counter: return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - + return typing._generic_new(collections.Counter, cls, *args, **kwds) +# 3.6.1+ if hasattr(typing, 'ChainMap'): ChainMap = typing.ChainMap - __all__.append('ChainMap') elif hasattr(collections, 'ChainMap'): - # ChainMap only exists in 3.3+ - if _geqv_defined: - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): + class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], + metaclass=_ExtensionsGenericMeta, + extra=collections.ChainMap): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, ChainMap): - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - else: - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is ChainMap: - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - - __all__.append('ChainMap') - - -if _define_guard('AsyncGenerator'): - class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], - metaclass=_ExtensionsGenericMeta, - extra=collections_abc.AsyncGenerator): __slots__ = () + def __new__(cls, *args, **kwds): + if cls._gorg is ChainMap: + return collections.ChainMap(*args, **kwds) + return typing._generic_new(collections.ChainMap, cls, *args, **kwds) -if hasattr(typing, 'NewType'): - NewType = typing.NewType +# 3.6.1+ +if hasattr(typing, 'AsyncGenerator'): + AsyncGenerator = typing.AsyncGenerator +# 3.6.0 else: - def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: + class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], + metaclass=_ExtensionsGenericMeta, + extra=collections.abc.AsyncGenerator): + __slots__ = () - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... - - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -if hasattr(typing, 'Text'): - Text = typing.Text -else: - Text = str - - -if hasattr(typing, 'TYPE_CHECKING'): - TYPE_CHECKING = typing.TYPE_CHECKING -else: - # Constant that's True when type checking, but False here. 
- TYPE_CHECKING = False +NewType = typing.NewType +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING def _gorg(cls): @@ -1111,16 +478,6 @@ def _gorg(cls): return cls -if OLD_GENERICS: - def _next_in_mro(cls): # noqa - """This function exists for compatibility with old typing versions.""" - next_in_mro = object - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i + 1] - return next_in_mro - - _PROTO_WHITELIST = ['Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', @@ -1150,9 +507,196 @@ def _is_callable_members_only(cls): return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) +# 3.8+ if hasattr(typing, 'Protocol'): Protocol = typing.Protocol -elif HAVE_PROTOCOLS and not PEP_560: +# 3.7 +elif PEP_560: + from typing import _collect_type_vars # noqa + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + class _ProtocolMeta(abc.ABCMeta): + # This metaclass is a bit unfortunate and exists only because of the lack + # of __instancehook__. + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. + if ((not getattr(cls, '_is_protocol', False) or + _is_callable_members_only(cls)) and + issubclass(instance.__class__, cls)): + return True + if cls._is_protocol: + if all(hasattr(instance, attr) and + (not callable(getattr(cls, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(cls)): + return True + return super().__instancecheck__(instance) + + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this while these live in two different + # modules. The duplicated code will be removed when Protocol is moved to typing. + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime act as simple-minded runtime protocol that checks + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... + """ + __slots__ = () + _is_protocol = True + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) # noqa + if cls is Protocol: + # Generic can only be subscripted with unique type variables. 
+ if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." + f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params) + return typing._GenericAlias(cls, params) + + def __init_subclass__(cls, *args, **kwargs): + tvars = [] + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + if '__orig_bases__' in cls.__dict__: + tvars = _collect_type_vars(cls.__orig_bases__) + # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...] and/or Protocol[...]. + gvars = None + for base in cls.__orig_bases__: + if (isinstance(base, typing._GenericAlias) and + base.__origin__ in (typing.Generic, Protocol)): + # for error messages + the_base = base.__origin__.__name__ + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...]" + " and/or Protocol[...] multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) + s_args = ', '.join(str(g) for g in gvars) + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {the_base}[{s_args}]") + tvars = gvars + cls.__parameters__ = tuple(tvars) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', None): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. + def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not getattr(cls, '_is_runtime_protocol', False): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if not _is_callable_members_only(cls): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # We have nothing more to do for non-protocols. + if not cls._is_protocol: + return + + # Check consistency of bases. 
+ for base in cls.__bases__: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, _ProtocolMeta) and base._is_protocol): + raise TypeError('Protocols can only inherit from other' + f' protocols, got {repr(base)}') + cls.__init__ = _no_init +# 3.6 +else: + from typing import _next_in_mro, _type_check # noqa def _no_init(self, *args, **kwargs): if type(self)._is_protocol: @@ -1164,71 +708,69 @@ elif HAVE_PROTOCOLS and not PEP_560: This exists so Protocol classes can be generic without deriving from Generic. """ - if not OLD_GENERICS: - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - # This is just a version copied from GenericMeta.__new__ that - # includes "Protocol" special treatment. (Comments removed for brevity.) - assert extra is None # Protocols should not have extra - if tvars is not None: - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - tvars = _type_vars(bases) - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ in (Generic, Protocol)): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] or" - " Protocol[...] multiple times.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + # This is just a version copied from GenericMeta.__new__ that + # includes "Protocol" special treatment. (Comments removed for brevity.) + assert extra is None # Protocols should not have extra + if tvars is not None: + assert origin is not None + assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars + else: + tvars = _type_vars(bases) + gvars = None + for base in bases: + if base is typing.Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ in (typing.Generic, Protocol)): + if gvars is not None: raise TypeError( - "Some type variables (%s) " - "are not listed in %s[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - "Generic" if any(b.__origin__ is Generic - for b in bases) else "Protocol", - ", ".join(str(g) for g in gvars))) - tvars = gvars + "Cannot inherit from Generic[...] or" + " Protocol[...] 
multiple times.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ", ".join(str(t) for t in tvars if t not in gvarset) + s_args = ", ".join(str(g) for g in gvars) + cls_name = "Generic" if any(b.__origin__ is typing.Generic + for b in bases) else "Protocol" + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {cls_name}[{s_args}]") + tvars = gvars - initial_bases = bases - if (extra is not None and type(extra) is abc.ABCMeta and - extra not in bases): - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b - for b in bases) - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, - _root=True) - super(GenericMeta, self).__setattr__('_gorg', - self if not origin else - _gorg(origin)) - self.__parameters__ = tvars - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - self.__next_in_mro__ = _next_in_mro(self) - if orig_bases is None: - self.__orig_bases__ = initial_bases - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - if hasattr(self, '_subs_tree'): - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self + initial_bases = bases + if (extra is not None and type(extra) is abc.ABCMeta and + extra not in bases): + bases = (extra,) + bases + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b + for b in bases) + if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases): + bases = tuple(b for b in bases if b is not typing.Generic) + namespace.update({'__origin__': origin, '__extra__': extra}) + self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, + _root=True) + super(GenericMeta, self).__setattr__('_gorg', + self if not origin else + _gorg(origin)) + self.__parameters__ = tvars + self.__args__ = tuple(... 
if a is typing._TypingEllipsis else + () if a is typing._TypingEmpty else + a for a in args) if args else None + self.__next_in_mro__ = _next_in_mro(self) + if orig_bases is None: + self.__orig_bases__ = initial_bases + elif origin is not None: + self._abc_registry = origin._abc_registry + self._abc_cache = origin._abc_cache + if hasattr(self, '_subs_tree'): + self.__tree_hash__ = (hash(self._subs_tree()) if origin else + super(GenericMeta, self).__hash__()) + return self def __init__(cls, *args, **kwargs): super().__init__(*args, **kwargs) @@ -1239,14 +781,14 @@ elif HAVE_PROTOCOLS and not PEP_560: for b in cls.__bases__) if cls._is_protocol: for base in cls.__mro__[1:]: - if not (base in (object, Generic) or + if not (base in (object, typing.Generic) or base.__module__ == 'collections.abc' and base.__name__ in _PROTO_WHITELIST or - isinstance(base, TypingMeta) and base._is_protocol or + isinstance(base, typing.TypingMeta) and base._is_protocol or isinstance(base, GenericMeta) and - base.__origin__ is Generic): - raise TypeError('Protocols can only inherit from other' - ' protocols, got %r' % base) + base.__origin__ is typing.Generic): + raise TypeError(f'Protocols can only inherit from other' + f' protocols, got {repr(base)}') cls.__init__ = _no_init @@ -1313,47 +855,45 @@ elif HAVE_PROTOCOLS and not PEP_560: " don't support issubclass()") return super(GenericMeta, self).__subclasscheck__(cls) - if not OLD_GENERICS: - @_tp_cache - def __getitem__(self, params): - # We also need to copy this from GenericMeta.__getitem__ to get - # special treatment of "Protocol". (Comments removed for brevity.) - if not isinstance(params, tuple): - params = (params,) - if not params and _gorg(self) is not Tuple: + @typing._tp_cache + def __getitem__(self, params): + # We also need to copy this from GenericMeta.__getitem__ to get + # special treatment of "Protocol". (Comments removed for brevity.) + if not isinstance(params, tuple): + params = (params,) + if not params and _gorg(self) is not typing.Tuple: + raise TypeError( + f"Parameter list to {self.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self in (typing.Generic, Protocol): + if not all(isinstance(p, typing.TypeVar) for p in params): raise TypeError( - "Parameter list to %s[...] cannot be empty" % self.__qualname__) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self in (Generic, Protocol): - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to %r[...] must all be type variables" % self) - if len(set(params)) != len(params): - raise TypeError( - "Parameters to %r[...] must all be unique" % self) - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self.__origin__ in (Generic, Protocol): - raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - _check_generic(self, params) - tvars = _type_vars(params) - args = params + f"Parameters to {repr(self)}[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + f"Parameters to {repr(self)}[...] 
must all be unique") + tvars = params + args = params + elif self in (typing.Tuple, typing.Callable): + tvars = _type_vars(params) + args = params + elif self.__origin__ in (typing.Generic, Protocol): + raise TypeError(f"Cannot subscript already-subscripted {repr(self)}") + else: + _check_generic(self, params) + tvars = _type_vars(params) + args = params - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) + prepend = (self,) if self.__origin__ is None else () + return self.__class__(self.__name__, + prepend + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) class Protocol(metaclass=_ProtocolMeta): """Base class for protocol classes. Protocol classes are defined as:: @@ -1380,7 +920,7 @@ elif HAVE_PROTOCOLS and not PEP_560: Protocol classes can be generic, they are defined as:: - class GenProto({bases}): + class GenProto(Protocol[T]): def meth(self) -> T: ... """ @@ -1391,203 +931,14 @@ elif HAVE_PROTOCOLS and not PEP_560: if _gorg(cls) is Protocol: raise TypeError("Type Protocol cannot be instantiated; " "it can be used only as a base class") - if OLD_GENERICS: - return _generic_new(_next_in_mro(cls), cls, *args, **kwds) - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - if Protocol.__doc__ is not None: - Protocol.__doc__ = Protocol.__doc__.format(bases="Protocol, Generic[T]" if - OLD_GENERICS else "Protocol[T]") - - -elif PEP_560: - from typing import _type_check, _collect_type_vars # noqa - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(abc.ABCMeta): - # This metaclass is a bit unfortunate and exists only because of the lack - # of __instancehook__. - def __instancecheck__(cls, instance): - # We need this method for situations where attributes are - # assigned in __init__. - if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and - issubclass(instance.__class__, cls)): - return True - if cls._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): - return True - return super().__instancecheck__(instance) - - class Protocol(metaclass=_ProtocolMeta): - # There is quite a lot of overlapping code with typing.Generic. - # Unfortunately it is hard to avoid this while these live in two different - # modules. The duplicated code will be removed when Protocol is moved to typing. - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. - - Protocol classes can be generic, they are defined as:: - - class GenProto(Protocol[T]): - def meth(self) -> T: - ... 
- """ - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if cls is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can only be used as a base class") - return super().__new__(cls) - - @_tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple): - params = (params,) - if not params and cls is not Tuple: - raise TypeError( - "Parameter list to {}[...] cannot be empty".format(cls.__qualname__)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if cls is Protocol: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - i = 0 - while isinstance(params[i], TypeVar): - i += 1 - raise TypeError( - "Parameters to Protocol[...] must all be type variables." - " Parameter {} is {}".format(i + 1, params[i])) - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Protocol[...] must all be unique") - else: - # Subscripting a regular Generic subclass. - _check_generic(cls, params) - return _GenericAlias(cls, params) - - def __init_subclass__(cls, *args, **kwargs): - tvars = [] - if '__orig_bases__' in cls.__dict__: - error = Generic in cls.__orig_bases__ - else: - error = Generic in cls.__bases__ - if error: - raise TypeError("Cannot inherit from plain Generic") - if '__orig_bases__' in cls.__dict__: - tvars = _collect_type_vars(cls.__orig_bases__) - # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...] and/or Protocol[...]. - gvars = None - for base in cls.__orig_bases__: - if (isinstance(base, _GenericAlias) and - base.__origin__ in (Generic, Protocol)): - # for error messages - the_base = 'Generic' if base.__origin__ is Generic else 'Protocol' - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...]" - " and/or Protocol[...] multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) - s_args = ', '.join(str(g) for g in gvars) - raise TypeError("Some type variables ({}) are" - " not listed in {}[{}]".format(s_vars, - the_base, s_args)) - tvars = gvars - cls.__parameters__ = tuple(tvars) - - # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol for b in cls.__bases__) - - # Set (or override) the protocol subclass hook. 
- def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - # We have nothing more to do for non-protocols. - if not cls._is_protocol: - return - - # Check consistency of bases. - for base in cls.__bases__: - if not (base in (object, Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, _ProtocolMeta) and base._is_protocol): - raise TypeError('Protocols can only inherit from other' - ' protocols, got %r' % base) - cls.__init__ = _no_init + return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds) +# 3.8+ if hasattr(typing, 'runtime_checkable'): runtime_checkable = typing.runtime_checkable -elif HAVE_PROTOCOLS: +# 3.6-3.7 +else: def runtime_checkable(cls): """Mark a protocol class as a runtime protocol, so that it can be used with isinstance() and issubclass(). Raise TypeError @@ -1598,19 +949,20 @@ elif HAVE_PROTOCOLS: """ if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: raise TypeError('@runtime_checkable can be only applied to protocol classes,' - ' got %r' % cls) + f' got {cls!r}') cls._is_runtime_protocol = True return cls -if HAVE_PROTOCOLS: - # Exists for backwards compatibility. - runtime = runtime_checkable +# Exists for backwards compatibility. +runtime = runtime_checkable +# 3.8+ if hasattr(typing, 'SupportsIndex'): SupportsIndex = typing.SupportsIndex -elif HAVE_PROTOCOLS: +# 3.6-3.7 +else: @runtime_checkable class SupportsIndex(Protocol): __slots__ = () @@ -1665,8 +1017,8 @@ else: fields, = args # allow the "_fields" keyword be passed except ValueError: raise TypeError('TypedDict.__new__() takes from 2 to 3 ' - 'positional arguments but {} ' - 'were given'.format(len(args) + 2)) + f'positional arguments but {len(args) + 2} ' + 'were given') elif '_fields' in kwargs and len(kwargs) == 1: fields = kwargs.pop('_fields') import warnings @@ -1695,10 +1047,7 @@ else: class _TypedDictMeta(type): def __init__(cls, name, bases, ns, total=True): - # In Python 3.4 and 3.5 the __init__ method also needs to support the - # keyword arguments. - # See https://www.python.org/dev/peps/pep-0487/#implementation-details - super(_TypedDictMeta, cls).__init__(name, bases, ns) + super().__init__(name, bases, ns) def __new__(cls, name, bases, ns, total=True): # Create new typed dict class object. @@ -1708,7 +1057,7 @@ else: # Subclasses and instances of TypedDict return actual dictionaries # via _dict_new. 
ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new - tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns) + tp_dict = super().__new__(cls, name, (dict,), ns) annotations = {} own_annotations = ns.get('__annotations__', {}) @@ -1780,6 +1129,7 @@ if hasattr(typing, 'Annotated'): # Not exported and not a public API, but needed for get_origin() and get_args() # to work. _AnnotatedAlias = typing._AnnotatedAlias +# 3.7-3.8 elif PEP_560: class _AnnotatedAlias(typing._GenericAlias, _root=True): """Runtime representation of an annotated type. @@ -1802,10 +1152,8 @@ elif PEP_560: return _AnnotatedAlias(new_type, self.__metadata__) def __repr__(self): - return "typing_extensions.Annotated[{}, {}]".format( - typing._type_repr(self.__origin__), - ", ".join(repr(a) for a in self.__metadata__) - ) + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") def __reduce__(self): return operator.getitem, ( @@ -1860,7 +1208,7 @@ elif PEP_560: def __new__(cls, *args, **kwargs): raise TypeError("Type Annotated cannot be instantiated.") - @_tp_cache + @typing._tp_cache def __class_getitem__(cls, params): if not isinstance(params, tuple) or len(params) < 2: raise TypeError("Annotated[...] should be used " @@ -1873,7 +1221,7 @@ elif PEP_560: def __init_subclass__(cls, *args, **kwargs): raise TypeError( - "Cannot subclass {}.Annotated".format(cls.__module__) + f"Cannot subclass {cls.__module__}.Annotated" ) def _strip_annotations(t): @@ -1925,8 +1273,8 @@ elif PEP_560: if include_extras: return hint return {k: _strip_annotations(t) for k, t in hint.items()} - -elif HAVE_ANNOTATED: +# 3.6 +else: def _is_dunder(name): """Returns True if name is a __dunder_variable_name__.""" @@ -1955,7 +1303,7 @@ elif HAVE_ANNOTATED: else: tp_repr = origin[0]._tree_repr(origin) metadata_reprs = ", ".join(repr(arg) for arg in metadata) - return '%s[%s, %s]' % (cls, tp_repr, metadata_reprs) + return f'{cls}[{tp_repr}, {metadata_reprs}]' def _subs_tree(self, tvars=None, args=None): # noqa if self is Annotated: @@ -1981,7 +1329,7 @@ elif HAVE_ANNOTATED: else: return tree - @_tp_cache + @typing._tp_cache def __getitem__(self, params): if not isinstance(params, tuple): params = (params,) @@ -2067,23 +1415,23 @@ elif HAVE_ANNOTATED: """ # Python 3.8 has get_origin() and get_args() but those implementations aren't -# Annotated-aware, so we can't use those, only Python 3.9 versions will do. -# Similarly, Python 3.9's implementation doesn't support ParamSpecArgs and -# ParamSpecKwargs. +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. if sys.version_info[:2] >= (3, 10): get_origin = typing.get_origin get_args = typing.get_args +# 3.7-3.9 elif PEP_560: try: # 3.9+ from typing import _BaseGenericAlias except ImportError: - _BaseGenericAlias = _GenericAlias + _BaseGenericAlias = typing._GenericAlias try: # 3.9+ from typing import GenericAlias except ImportError: - GenericAlias = _GenericAlias + GenericAlias = typing._GenericAlias def get_origin(tp): """Get the unsubscripted version of a type. 
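As a rough sketch of what the backported helpers in this hunk are meant to return once the patch is applied (the `Annotated[int, "units"]` value is an arbitrary illustration, not taken from the patch itself):

    from typing_extensions import Annotated, get_args, get_origin

    # Annotated-aware, unlike the 3.8 stdlib versions mentioned above.
    assert get_origin(Annotated[int, "units"]) is Annotated
    assert get_args(Annotated[int, "units"]) == (int, "units")
    # Plain (unsubscripted) types have no origin.
    assert get_origin(int) is None
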
@@ -2102,11 +1450,11 @@ elif PEP_560: """ if isinstance(tp, _AnnotatedAlias): return Annotated - if isinstance(tp, (_GenericAlias, GenericAlias, _BaseGenericAlias, + if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias, ParamSpecArgs, ParamSpecKwargs)): return tp.__origin__ - if tp is Generic: - return Generic + if tp is typing.Generic: + return typing.Generic return None def get_args(tp): @@ -2122,7 +1470,7 @@ elif PEP_560: """ if isinstance(tp, _AnnotatedAlias): return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, (_GenericAlias, GenericAlias)): + if isinstance(tp, (typing._GenericAlias, GenericAlias)): if getattr(tp, "_special", False): return () res = tp.__args__ @@ -2132,8 +1480,10 @@ elif PEP_560: return () +# 3.10+ if hasattr(typing, 'TypeAlias'): TypeAlias = typing.TypeAlias +# 3.9 elif sys.version_info[:2] >= (3, 9): class _TypeAliasForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -2151,8 +1501,8 @@ elif sys.version_info[:2] >= (3, 9): It's invalid when used anywhere except as in the example above. """ - raise TypeError("{} is not subscriptable".format(self)) - + raise TypeError(f"{self} is not subscriptable") +# 3.7-3.8 elif sys.version_info[:2] >= (3, 7): class _TypeAliasForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -2169,8 +1519,8 @@ elif sys.version_info[:2] >= (3, 7): It's invalid when used anywhere except as in the example above.""") - -elif hasattr(typing, '_FinalTypingBase'): +# 3.6 +else: class _TypeAliasMeta(typing.TypingMeta): """Metaclass for TypeAlias""" @@ -2200,37 +1550,13 @@ elif hasattr(typing, '_FinalTypingBase'): return 'typing_extensions.TypeAlias' TypeAlias = _TypeAliasBase(_root=True) -else: - class _TypeAliasMeta(typing.TypingMeta): - """Metaclass for TypeAlias""" - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __call__(self, *args, **kwargs): - raise TypeError("Cannot instantiate TypeAlias") - - class TypeAlias(metaclass=_TypeAliasMeta, _root=True): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - __slots__ = () # Python 3.10+ has PEP 612 if hasattr(typing, 'ParamSpecArgs'): ParamSpecArgs = typing.ParamSpecArgs ParamSpecKwargs = typing.ParamSpecKwargs +# 3.6-3.9 else: class _Immutable: """Mixin to indicate that object should not be copied.""" @@ -2258,7 +1584,7 @@ else: self.__origin__ = origin def __repr__(self): - return "{}.args".format(self.__origin__.__name__) + return f"{self.__origin__.__name__}.args" class ParamSpecKwargs(_Immutable): """The kwargs for a ParamSpec object. @@ -2276,10 +1602,12 @@ else: self.__origin__ = origin def __repr__(self): - return "{}.kwargs".format(self.__origin__.__name__) + return f"{self.__origin__.__name__}.kwargs" +# 3.10+ if hasattr(typing, 'ParamSpec'): ParamSpec = typing.ParamSpec +# 3.6-3.9 else: # Inherits from list as a workaround for Callable checks in Python < 3.9.2. @@ -2331,7 +1659,7 @@ else: """ # Trick Generic __parameters__. - __class__ = TypeVar + __class__ = typing.TypeVar @property def args(self): @@ -2382,61 +1710,60 @@ else: pass if not PEP_560: - # Only needed in 3.6 and lower. + # Only needed in 3.6. 
def _get_type_vars(self, tvars): if self not in tvars: tvars.append(self) -# Inherits from list as a workaround for Callable checks in Python < 3.9.2. -class _ConcatenateGenericAlias(list): +# 3.6-3.9 +if not hasattr(typing, 'Concatenate'): + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class _ConcatenateGenericAlias(list): - # Trick Generic into looking into this for __parameters__. - if PEP_560: - __class__ = typing._GenericAlias - elif sys.version_info[:3] == (3, 5, 2): - __class__ = typing.TypingMeta - else: - __class__ = typing._TypingBase + # Trick Generic into looking into this for __parameters__. + if PEP_560: + __class__ = typing._GenericAlias + else: + __class__ = typing._TypingBase - # Flag in 3.8. - _special = False - # Attribute in 3.6 and earlier. - if sys.version_info[:3] == (3, 5, 2): - _gorg = typing.GenericMeta - else: + # Flag in 3.8. + _special = False + # Attribute in 3.6 and earlier. _gorg = typing.Generic - def __init__(self, origin, args): - super().__init__(args) - self.__origin__ = origin - self.__args__ = args + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args - def __repr__(self): - _type_repr = typing._type_repr - return '{origin}[{args}]' \ - .format(origin=_type_repr(self.__origin__), - args=', '.join(_type_repr(arg) for arg in self.__args__)) + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') - def __hash__(self): - return hash((self.__origin__, self.__args__)) + def __hash__(self): + return hash((self.__origin__, self.__args__)) - # Hack to get typing._type_check to pass in Generic. - def __call__(self, *args, **kwargs): - pass + # Hack to get typing._type_check to pass in Generic. + def __call__(self, *args, **kwargs): + pass - @property - def __parameters__(self): - return tuple(tp for tp in self.__args__ if isinstance(tp, (TypeVar, ParamSpec))) + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) - if not PEP_560: - # Only required in 3.6 and lower. - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - typing._get_type_vars(self.__parameters__, tvars) + if not PEP_560: + # Only required in 3.6. + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + typing._get_type_vars(self.__parameters__, tvars) -@_tp_cache +# 3.6-3.9 +@typing._tp_cache def _concatenate_getitem(self, parameters): if parameters == (): raise TypeError("Cannot take a Concatenate of no types.") @@ -2450,9 +1777,11 @@ def _concatenate_getitem(self, parameters): return _ConcatenateGenericAlias(self, parameters) +# 3.10+ if hasattr(typing, 'Concatenate'): Concatenate = typing.Concatenate _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa +# 3.9 elif sys.version_info[:2] >= (3, 9): @_TypeAliasForm def Concatenate(self, parameters): @@ -2467,7 +1796,7 @@ elif sys.version_info[:2] >= (3, 9): See PEP 612 for detailed information. """ return _concatenate_getitem(self, parameters) - +# 3.7-8 elif sys.version_info[:2] >= (3, 7): class _ConcatenateForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -2488,8 +1817,8 @@ elif sys.version_info[:2] >= (3, 7): See PEP 612 for detailed information. 
""") - -elif hasattr(typing, '_FinalTypingBase'): +# 3.6 +else: class _ConcatenateAliasMeta(typing.TypingMeta): """Metaclass for Concatenate.""" @@ -2524,38 +1853,11 @@ elif hasattr(typing, '_FinalTypingBase'): return _concatenate_getitem(self, parameters) Concatenate = _ConcatenateAliasBase(_root=True) -# For 3.5.0 - 3.5.2 -else: - class _ConcatenateAliasMeta(typing.TypingMeta): - """Metaclass for Concatenate.""" - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __call__(self, *args, **kwargs): - raise TypeError("Cannot instantiate TypeAlias") - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - class Concatenate(metaclass=_ConcatenateAliasMeta, _root=True): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """ - __slots__ = () +# 3.10+ if hasattr(typing, 'TypeGuard'): TypeGuard = typing.TypeGuard +# 3.9 elif sys.version_info[:2] >= (3, 9): class _TypeGuardForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -2605,9 +1907,9 @@ elif sys.version_info[:2] >= (3, 9): ``TypeGuard`` also works with type variables. For more information, see PEP 647 (User-Defined Type Guards). """ - item = typing._type_check(parameters, '{} accepts only single type.'.format(self)) - return _GenericAlias(self, (item,)) - + item = typing._type_check(parameters, f'{self} accepts only single type.') + return typing._GenericAlias(self, (item,)) +# 3.7-3.8 elif sys.version_info[:2] >= (3, 7): class _TypeGuardForm(typing._SpecialForm, _root=True): @@ -2616,8 +1918,8 @@ elif sys.version_info[:2] >= (3, 7): def __getitem__(self, parameters): item = typing._type_check(parameters, - '{} accepts only a single type'.format(self._name)) - return _GenericAlias(self, (item,)) + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) TypeGuard = _TypeGuardForm( 'TypeGuard', @@ -2663,7 +1965,8 @@ elif sys.version_info[:2] >= (3, 7): ``TypeGuard`` also works with type variables. For more information, see PEP 647 (User-Defined Type Guards). """) -elif hasattr(typing, '_FinalTypingBase'): +# 3.6 +else: class _TypeGuard(typing._FinalTypingBase, _root=True): """Special typing form used to annotate the return type of a user-defined type guard function. ``TypeGuard`` only accepts a single type argument. 
@@ -2717,7 +2020,221 @@ elif hasattr(typing, '_FinalTypingBase'): cls = type(self) if self.__type__ is None: return cls(typing._type_check(item, - '{} accepts only a single type.'.format(cls.__name__[1:])), + f'{cls.__name__[1:]} accepts only a single type.'), + _root=True) + raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += f'[{typing._type_repr(self.__type__)}]' + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _TypeGuard): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + TypeGuard = _TypeGuard(_root=True) + +if hasattr(typing, "Self"): + Self = typing.Self +elif sys.version_info[:2] >= (3, 7): + # Vendored from cpython typing._SpecialFrom + class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f'typing_extensions.{self._name}' + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") +else: + class _Self(typing._FinalTypingBase, _root=True): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass().") + + Self = _Self(_root=True) + + +if hasattr(typing, 'Required'): + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): + class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. 
For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only single type') + return typing._GenericAlias(self, (item,)) + +elif sys.version_info[:2] >= (3, 7): + class _RequiredForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + '{} accepts only single type'.format(self._name)) + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) +else: + # NOTE: Modeled after _Final's implementation when _FinalTypingBase available + class _MaybeRequired(typing._FinalTypingBase, _root=True): + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(typing._type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), _root=True) raise TypeError('{} cannot be further subscripted' .format(cls.__name__[1:])) @@ -2738,106 +2255,42 @@ elif hasattr(typing, '_FinalTypingBase'): return hash((type(self).__name__, self.__type__)) def __eq__(self, other): - if not isinstance(other, _TypeGuard): + if not isinstance(other, type(self)): return NotImplemented if self.__type__ is not None: return self.__type__ == other.__type__ return self is other - TypeGuard = _TypeGuard(_root=True) -else: - class _TypeGuardMeta(typing.TypingMeta): - """Metaclass for TypeGuard""" + class _Required(_MaybeRequired, _root=True): + """A special typing construct to mark a key of a total=False TypedDict + as required. 
For example: - def __new__(cls, name, bases, namespace, tp=None, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - if tp is not None: - self.__type__ = tp - return self + class Movie(TypedDict, total=False): + title: Required[str] + year: int - def __instancecheck__(self, obj): - raise TypeError("TypeGuard cannot be used with isinstance().") + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) - def __subclasscheck__(self, cls): - raise TypeError("TypeGuard cannot be used with issubclass().") - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is not None: - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - param = typing._type_check( - item, - '{} accepts only single type.'.format(cls.__name__[1:])) - return cls(self.__name__, self.__bases__, - dict(self.__dict__), tp=param, _root=True) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(self.__name__, self.__bases__, - dict(self.__dict__), tp=self.__type__, - _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not hasattr(other, "__type__"): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - class TypeGuard(typing.Final, metaclass=_TypeGuardMeta, _root=True): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). 
+ There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. """ - __type__ = None + + class _NotRequired(_MaybeRequired, _root=True): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + + Required = _Required(_root=True) + NotRequired = _NotRequired(_root=True) diff --git a/requirements.txt b/requirements.txt index d9f84441..d8f17a52 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ appdirs==1.4.4 apscheduler==3.8.0 -arrow==1.2.1 +arrow==1.2.2 backports.csv==1.0.7 backports.functools-lru-cache==1.6.4 backports.zoneinfo==0.2.1 From 5523d4ba88d4b4a51f704958269b4d04081aa11b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 21:57:29 -0800 Subject: [PATCH 012/743] Bump httpagentparser from 1.9.1 to 1.9.2 (#1631) * Bump httpagentparser from 1.9.1 to 1.9.2 Bumps [httpagentparser](https://github.com/shon/httpagentparser) from 1.9.1 to 1.9.2. - [Release notes](https://github.com/shon/httpagentparser/releases) - [Commits](https://github.com/shon/httpagentparser/commits) --- updated-dependencies: - dependency-name: httpagentparser dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Update httpagentparser==1.9.2 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/httpagentparser/__init__.py | 37 +++++++++++++++++++++++++++------ requirements.txt | 2 +- 2 files changed, 32 insertions(+), 7 deletions(-) diff --git a/lib/httpagentparser/__init__.py b/lib/httpagentparser/__init__.py index 9bac5148..0e30e617 100644 --- a/lib/httpagentparser/__init__.py +++ b/lib/httpagentparser/__init__.py @@ -8,7 +8,7 @@ Tries to * assist python web apps to detect clients. 
""" -__version__ = '1.9.1' +__version__ = '1.9.2' class DetectorsHub(dict): @@ -671,11 +671,21 @@ def detect(agent, fill_none=False): return result -def simple_detect(agent): +UNKNOWN_OS_NAME = 'Unknown OS' +UNKNOWN_BROWSER_NAME = 'Unknown Browser' + + +def simple_detect_tuple(agent, parsed_agent=None): """ - -> (os, browser) # tuple of strings + @params: + agent::str + parsed_agent::dict + The result of detect, used to save calculations + + @return: + (os_name, os_version, browser_name, browser_version)::Tuple(str) """ - result = detect(agent) + result = parsed_agent or detect(agent) os_list = [] if 'flavor' in result: os_list.append(result['flavor']['name']) @@ -684,11 +694,26 @@ def simple_detect(agent): if 'os' in result: os_list.append(result['os']['name']) - os = os_list and " ".join(os_list) or "Unknown OS" + os = os_list and " ".join(os_list) or UNKNOWN_OS_NAME os_version = os_list and (result.get('flavor') and result['flavor'].get('version')) or \ (result.get('dist') and result['dist'].get('version')) or (result.get('os') and result['os'].get('version')) or "" - browser = 'browser' in result and result['browser'].get('name') or 'Unknown Browser' + browser = 'browser' in result and result['browser'].get('name') or UNKNOWN_BROWSER_NAME browser_version = 'browser' in result and result['browser'].get('version') or "" + + return os, os_version, browser, browser_version + + +def simple_detect(agent, parsed_agent=None): + """ + @params: + agent::str + parsed_agent::dict + The result of detect, used to save calculations + + @return: + (os_name_version, browser_name_version)::Tuple(str) + """ + os, os_version, browser, browser_version = simple_detect_tuple(agent, parsed_agent=parsed_agent) if browser_version: browser = " ".join((browser, browser_version)) if os_version: diff --git a/requirements.txt b/requirements.txt index d8f17a52..6388dc9f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -16,7 +16,7 @@ facebook-sdk==3.1.0 future==0.18.2 gntp==1.0.3 html5lib==1.1 -httpagentparser==1.9.1 +httpagentparser==1.9.2 idna==3.3 importlib-resources==5.4.0 git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois From 61960aa7449d53a0c4e64c505c0c258523614bc0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 21:57:50 -0800 Subject: [PATCH 013/743] Bump requests-oauthlib from 1.3.0 to 1.3.1 (#1636) * Bump requests-oauthlib from 1.3.0 to 1.3.1 Bumps [requests-oauthlib](https://github.com/requests/requests-oauthlib) from 1.3.0 to 1.3.1. - [Release notes](https://github.com/requests/requests-oauthlib/releases) - [Changelog](https://github.com/requests/requests-oauthlib/blob/master/HISTORY.rst) - [Commits](https://github.com/requests/requests-oauthlib/compare/v1.3.0...v1.3.1) --- updated-dependencies: - dependency-name: requests-oauthlib dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Update requests-oauthlib==1.3.1 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/charset_normalizer/api.py | 103 +++++++++++------ lib/charset_normalizer/constant.py | 3 + lib/charset_normalizer/version.py | 2 +- lib/oauthlib/__init__.py | 2 +- lib/oauthlib/oauth2/__init__.py | 1 + lib/oauthlib/oauth2/rfc6749/clients/base.py | 109 +++++++++++++++++- .../oauth2/rfc6749/clients/web_application.py | 25 +++- .../oauth2/rfc6749/endpoints/metadata.py | 3 +- lib/oauthlib/oauth2/rfc6749/errors.py | 7 +- .../rfc6749/grant_types/authorization_code.py | 21 ++++ .../rfc6749/grant_types/refresh_token.py | 2 +- lib/oauthlib/oauth2/rfc6749/parameters.py | 20 +++- .../oauth2/rfc6749/request_validator.py | 25 ++++ lib/oauthlib/oauth2/rfc8628/__init__.py | 10 ++ .../oauth2/rfc8628/clients/__init__.py | 8 ++ lib/oauthlib/oauth2/rfc8628/clients/device.py | 94 +++++++++++++++ .../connect/core/grant_types/__init__.py | 1 + .../connect/core/grant_types/refresh_token.py | 34 ++++++ .../openid/connect/core/request_validator.py | 12 ++ lib/requests_oauthlib/__init__.py | 2 +- .../compliance_fixes/__init__.py | 2 +- .../compliance_fixes/ebay.py | 23 ++++ .../compliance_fixes/linkedin.py | 21 ---- lib/requests_oauthlib/oauth1_session.py | 2 +- lib/requests_oauthlib/oauth2_session.py | 6 + requirements.txt | 3 +- 26 files changed, 464 insertions(+), 77 deletions(-) create mode 100644 lib/oauthlib/oauth2/rfc8628/__init__.py create mode 100644 lib/oauthlib/oauth2/rfc8628/clients/__init__.py create mode 100644 lib/oauthlib/oauth2/rfc8628/clients/device.py create mode 100644 lib/oauthlib/openid/connect/core/grant_types/refresh_token.py create mode 100644 lib/requests_oauthlib/compliance_fixes/ebay.py delete mode 100644 lib/requests_oauthlib/compliance_fixes/linkedin.py diff --git a/lib/charset_normalizer/api.py b/lib/charset_normalizer/api.py index b3e198a7..bdc8ed98 100644 --- a/lib/charset_normalizer/api.py +++ b/lib/charset_normalizer/api.py @@ -13,7 +13,7 @@ from .cd import ( mb_encoding_languages, merge_coherence_ratios, ) -from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE from .md import mess_ratio from .models import CharsetMatch, CharsetMatches from .utils import ( @@ -25,6 +25,8 @@ from .utils import ( should_strip_sig_or_bom, ) +# Will most likely be controversial +# logging.addLevelName(TRACE, "TRACE") logger = logging.getLogger("charset_normalizer") explain_handler = logging.StreamHandler() explain_handler.setFormatter( @@ -70,19 +72,20 @@ def from_bytes( if explain: previous_logger_level = logger.level # type: int logger.addHandler(explain_handler) - logger.setLevel(logging.DEBUG) + logger.setLevel(TRACE) length = len(sequences) # type: int if length == 0: - logger.warning("Encoding detection on empty bytes, assuming utf_8 intention.") + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") if explain: logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level or logging.WARNING) return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) if cp_isolation is not None: - logger.debug( + logger.log( + TRACE, "cp_isolation is set. use this flag for debugging purpose. 
" "limited list of encoding allowed : %s.", ", ".join(cp_isolation), @@ -92,7 +95,8 @@ def from_bytes( cp_isolation = [] if cp_exclusion is not None: - logger.debug( + logger.log( + TRACE, "cp_exclusion is set. use this flag for debugging purpose. " "limited list of encoding excluded : %s.", ", ".join(cp_exclusion), @@ -102,7 +106,8 @@ def from_bytes( cp_exclusion = [] if length <= (chunk_size * steps): - logger.debug( + logger.log( + TRACE, "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", steps, chunk_size, @@ -118,16 +123,18 @@ def from_bytes( is_too_large_sequence = len(sequences) >= TOO_BIG_SEQUENCE # type: bool if is_too_small_sequence: - logger.warning( + logger.log( + TRACE, "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( length - ) + ), ) elif is_too_large_sequence: - logger.info( + logger.log( + TRACE, "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( length - ) + ), ) prioritized_encodings = [] # type: List[str] @@ -138,7 +145,8 @@ def from_bytes( if specified_encoding is not None: prioritized_encodings.append(specified_encoding) - logger.info( + logger.log( + TRACE, "Detected declarative mark in sequence. Priority +1 given for %s.", specified_encoding, ) @@ -157,7 +165,8 @@ def from_bytes( if sig_encoding is not None: prioritized_encodings.append(sig_encoding) - logger.info( + logger.log( + TRACE, "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.", len(sig_payload), sig_encoding, @@ -188,7 +197,8 @@ def from_bytes( ) # type: bool if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: - logger.debug( + logger.log( + TRACE, "Encoding %s wont be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", encoding_iana, ) @@ -197,8 +207,10 @@ def from_bytes( try: is_multi_byte_decoder = is_multi_byte_encoding(encoding_iana) # type: bool except (ModuleNotFoundError, ImportError): - logger.debug( - "Encoding %s does not provide an IncrementalDecoder", encoding_iana + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, ) continue @@ -219,7 +231,8 @@ def from_bytes( ) except (UnicodeDecodeError, LookupError) as e: if not isinstance(e, LookupError): - logger.debug( + logger.log( + TRACE, "Code page %s does not fit given bytes sequence at ALL. %s", encoding_iana, str(e), @@ -235,7 +248,8 @@ def from_bytes( break if similar_soft_failure_test: - logger.debug( + logger.log( + TRACE, "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!", encoding_iana, encoding_soft_failed, @@ -255,7 +269,8 @@ def from_bytes( ) # type: bool if multi_byte_bonus: - logger.debug( + logger.log( + TRACE, "Code page %s is a multi byte encoding table and it appear that at least one character " "was encoded using n-bytes.", encoding_iana, @@ -285,7 +300,8 @@ def from_bytes( errors="ignore" if is_multi_byte_decoder else "strict", ) # type: str except UnicodeDecodeError as e: # Lazy str loading may have missed something there - logger.debug( + logger.log( + TRACE, "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", encoding_iana, str(e), @@ -337,7 +353,8 @@ def from_bytes( try: sequences[int(50e3) :].decode(encoding_iana, errors="strict") except UnicodeDecodeError as e: - logger.debug( + logger.log( + TRACE, "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. 
%s", encoding_iana, str(e), @@ -350,7 +367,8 @@ def from_bytes( ) # type: float if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: tested_but_soft_failure.append(encoding_iana) - logger.info( + logger.log( + TRACE, "%s was excluded because of initial chaos probing. Gave up %i time(s). " "Computed mean chaos is %f %%.", encoding_iana, @@ -373,7 +391,8 @@ def from_bytes( fallback_u8 = fallback_entry continue - logger.info( + logger.log( + TRACE, "%s passed initial chaos probing. Mean measured chaos is %f %%", encoding_iana, round(mean_mess_ratio * 100, ndigits=3), @@ -385,10 +404,11 @@ def from_bytes( target_languages = mb_encoding_languages(encoding_iana) if target_languages: - logger.debug( + logger.log( + TRACE, "{} should target any language(s) of {}".format( encoding_iana, str(target_languages) - ) + ), ) cd_ratios = [] @@ -406,10 +426,11 @@ def from_bytes( cd_ratios_merged = merge_coherence_ratios(cd_ratios) if cd_ratios_merged: - logger.info( + logger.log( + TRACE, "We detected language {} using {}".format( cd_ratios_merged, encoding_iana - ) + ), ) results.append( @@ -427,8 +448,8 @@ def from_bytes( encoding_iana in [specified_encoding, "ascii", "utf_8"] and mean_mess_ratio < 0.1 ): - logger.info( - "%s is most likely the one. Stopping the process.", encoding_iana + logger.debug( + "Encoding detection: %s is most likely the one.", encoding_iana ) if explain: logger.removeHandler(explain_handler) @@ -436,8 +457,9 @@ def from_bytes( return CharsetMatches([results[encoding_iana]]) if encoding_iana == sig_encoding: - logger.info( - "%s is most likely the one as we detected a BOM or SIG within the beginning of the sequence.", + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", encoding_iana, ) if explain: @@ -447,13 +469,15 @@ def from_bytes( if len(results) == 0: if fallback_u8 or fallback_ascii or fallback_specified: - logger.debug( - "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback." + logger.log( + TRACE, + "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.", ) if fallback_specified: logger.debug( - "%s will be used as a fallback match", fallback_specified.encoding + "Encoding detection: %s will be used as a fallback match", + fallback_specified.encoding, ) results.append(fallback_specified) elif ( @@ -465,12 +489,21 @@ def from_bytes( ) or (fallback_u8 is not None) ): - logger.warning("utf_8 will be used as a fallback match") + logger.debug("Encoding detection: utf_8 will be used as a fallback match") results.append(fallback_u8) elif fallback_ascii: - logger.warning("ascii will be used as a fallback match") + logger.debug("Encoding detection: ascii will be used as a fallback match") results.append(fallback_ascii) + if results: + logger.debug( + "Encoding detection: Found %s as plausible (best-candidate) for content. 
With %i alternatives.", + results.best().encoding, # type: ignore + len(results) - 1, + ) + else: + logger.debug("Encoding detection: Unable to determine any suitable charset.") + if explain: logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level) diff --git a/lib/charset_normalizer/constant.py b/lib/charset_normalizer/constant.py index 3d5d6457..c32f5cf2 100644 --- a/lib/charset_normalizer/constant.py +++ b/lib/charset_normalizer/constant.py @@ -498,3 +498,6 @@ ZH_NAMES = {"big5", "cp950", "big5hkscs", "hz"} # type: Set[str] NOT_PRINTABLE_PATTERN = re_compile(r"[0-9\W\n\r\t]+") LANGUAGE_SUPPORTED_COUNT = len(FREQUENCIES) # type: int + +# Logging LEVEL bellow DEBUG +TRACE = 5 # type: int diff --git a/lib/charset_normalizer/version.py b/lib/charset_normalizer/version.py index a8d66597..69bf0503 100644 --- a/lib/charset_normalizer/version.py +++ b/lib/charset_normalizer/version.py @@ -2,5 +2,5 @@ Expose version """ -__version__ = "2.0.10" +__version__ = "2.0.11" VERSION = __version__.split(".") diff --git a/lib/oauthlib/__init__.py b/lib/oauthlib/__init__.py index a94cf941..5dbffc96 100644 --- a/lib/oauthlib/__init__.py +++ b/lib/oauthlib/__init__.py @@ -12,7 +12,7 @@ import logging from logging import NullHandler __author__ = 'The OAuthlib Community' -__version__ = '3.1.1' +__version__ = '3.2.0' logging.getLogger('oauthlib').addHandler(NullHandler()) diff --git a/lib/oauthlib/oauth2/__init__.py b/lib/oauthlib/oauth2/__init__.py index a6e1cccd..deefb1af 100644 --- a/lib/oauthlib/oauth2/__init__.py +++ b/lib/oauthlib/oauth2/__init__.py @@ -33,3 +33,4 @@ from .rfc6749.grant_types import ( from .rfc6749.request_validator import RequestValidator from .rfc6749.tokens import BearerToken, OAuth2Token from .rfc6749.utils import is_secure_transport +from .rfc8628.clients import DeviceClient diff --git a/lib/oauthlib/oauth2/rfc6749/clients/base.py b/lib/oauthlib/oauth2/rfc6749/clients/base.py index 88065ab3..bb4c1338 100644 --- a/lib/oauthlib/oauth2/rfc6749/clients/base.py +++ b/lib/oauthlib/oauth2/rfc6749/clients/base.py @@ -8,6 +8,10 @@ for consuming OAuth 2.0 RFC6749. """ import time import warnings +import secrets +import re +import hashlib +import base64 from oauthlib.common import generate_token from oauthlib.oauth2.rfc6749 import tokens @@ -61,6 +65,9 @@ class Client: state=None, redirect_url=None, state_generator=generate_token, + code_verifier=None, + code_challenge=None, + code_challenge_method=None, **kwargs): """Initialize a client with commonly used attributes. @@ -99,6 +106,15 @@ class Client: :param state_generator: A no argument state generation callable. Defaults to :py:meth:`oauthlib.common.generate_token`. + + :param code_verifier: PKCE parameter. A cryptographically random string that is used to correlate the + authorization request to the token request. + + :param code_challenge: PKCE parameter. A challenge derived from the code verifier that is sent in the + authorization request, to be verified against later. + + :param code_challenge_method: PKCE parameter. A method that was used to derive code challenge. + Defaults to "plain" if not present in the request. 
""" self.client_id = client_id @@ -113,6 +129,9 @@ class Client: self.state_generator = state_generator self.state = state self.redirect_url = redirect_url + self.code_verifier = code_verifier + self.code_challenge = code_challenge + self.code_challenge_method = code_challenge_method self.code = None self.expires_in = None self._expires_at = None @@ -471,6 +490,91 @@ class Client: raise ValueError("Invalid token placement.") return uri, headers, body + def create_code_verifier(self, length): + """Create PKCE **code_verifier** used in computing **code_challenge**. + + :param length: REQUIRED. The length of the code_verifier. + + The client first creates a code verifier, "code_verifier", for each + OAuth 2.0 [RFC6749] Authorization Request, in the following manner: + + code_verifier = high-entropy cryptographic random STRING using the + unreserved characters [A-Z] / [a-z] / [0-9] / "-" / "." / "_" / "~" + from Section 2.3 of [RFC3986], with a minimum length of 43 characters + and a maximum length of 128 characters. + + .. _`Section 4.1`: https://tools.ietf.org/html/rfc7636#section-4.1 + """ + code_verifier = None + + if not length >= 43: + raise ValueError("Length must be greater than or equal to 43") + + if not length <= 128: + raise ValueError("Length must be less than or equal to 128") + + allowed_characters = re.compile('^[A-Zaa-z0-9-._~]') + code_verifier = secrets.token_urlsafe(length) + + if not re.search(allowed_characters, code_verifier): + raise ValueError("code_verifier contains invalid characters") + + self.code_verifier = code_verifier + + return code_verifier + + def create_code_challenge(self, code_verifier, code_challenge_method=None): + """Create PKCE **code_challenge** derived from the **code_verifier**. + + :param code_verifier: REQUIRED. The **code_verifier** generated from create_code_verifier(). + :param code_challenge_method: OPTIONAL. The method used to derive the **code_challenge**. Acceptable + values include "S256". DEFAULT is "plain". + + + The client then creates a code challenge derived from the code + verifier by using one of the following transformations on the code + verifier: + + plain + code_challenge = code_verifier + + S256 + code_challenge = BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) + + If the client is capable of using "S256", it MUST use "S256", as + "S256" is Mandatory To Implement (MTI) on the server. Clients are + permitted to use "plain" only if they cannot support "S256" for some + technical reason and know via out-of-band configuration that the + server supports "plain". + + The plain transformation is for compatibility with existing + deployments and for constrained environments that can't use the S256 + transformation. + + .. 
_`Section 4.2`: https://tools.ietf.org/html/rfc7636#section-4.2 + """ + code_challenge = None + + if code_verifier == None: + raise ValueError("Invalid code_verifier") + + if code_challenge_method == None: + code_challenge_method = "plain" + self.code_challenge_method = code_challenge_method + code_challenge = code_verifier + self.code_challenge = code_challenge + + if code_challenge_method == "S256": + h = hashlib.sha256() + h.update(code_verifier.encode(encoding='ascii')) + sha256_val = h.digest() + code_challenge = bytes.decode(base64.urlsafe_b64encode(sha256_val)) + # replace '+' with '-', '/' with '_', and remove trailing '=' + code_challenge = code_challenge.replace("+", "-").replace("/", "_").replace("=", "") + self.code_challenge = code_challenge + + return code_challenge + def _add_mac_token(self, uri, http_method='GET', body=None, headers=None, token_placement=AUTH_HEADER, ext=None, **kwargs): """Add a MAC token to the request authorization header. @@ -513,7 +617,10 @@ class Client: self._expires_at = time.time() + int(self.expires_in) if 'expires_at' in response: - self._expires_at = int(response.get('expires_at')) + try: + self._expires_at = int(response.get('expires_at')) + except: + self._expires_at = None if 'mac_key' in response: self.mac_key = response.get('mac_key') diff --git a/lib/oauthlib/oauth2/rfc6749/clients/web_application.py b/lib/oauthlib/oauth2/rfc6749/clients/web_application.py index a1f3db1d..1d3b2b5b 100644 --- a/lib/oauthlib/oauth2/rfc6749/clients/web_application.py +++ b/lib/oauthlib/oauth2/rfc6749/clients/web_application.py @@ -41,7 +41,7 @@ class WebApplicationClient(Client): self.code = code def prepare_request_uri(self, uri, redirect_uri=None, scope=None, - state=None, **kwargs): + state=None, code_challenge=None, code_challenge_method='plain', **kwargs): """Prepare the authorization code request URI The client constructs the request URI by adding the following @@ -62,6 +62,13 @@ class WebApplicationClient(Client): to the client. The parameter SHOULD be used for preventing cross-site request forgery as described in `Section 10.12`_. + :param code_challenge: OPTIONAL. PKCE parameter. REQUIRED if PKCE is enforced. + A challenge derived from the code_verifier that is sent in the + authorization request, to be verified against later. + + :param code_challenge_method: OPTIONAL. PKCE parameter. A method that was used to derive code challenge. + Defaults to "plain" if not present in the request. + :param kwargs: Extra arguments to include in the request URI. 
In addition to supplied parameters, OAuthLib will append the ``client_id`` @@ -76,6 +83,10 @@ class WebApplicationClient(Client): 'https://example.com?client_id=your_id&response_type=code&redirect_uri=https%3A%2F%2Fa.b%2Fcallback' >>> client.prepare_request_uri('https://example.com', scope=['profile', 'pictures']) 'https://example.com?client_id=your_id&response_type=code&scope=profile+pictures' + >>> client.prepare_request_uri('https://example.com', code_challenge='kjasBS523KdkAILD2k78NdcJSk2k3KHG6') + 'https://example.com?client_id=your_id&response_type=code&code_challenge=kjasBS523KdkAILD2k78NdcJSk2k3KHG6' + >>> client.prepare_request_uri('https://example.com', code_challenge_method='S256') + 'https://example.com?client_id=your_id&response_type=code&code_challenge_method=S256' >>> client.prepare_request_uri('https://example.com', foo='bar') 'https://example.com?client_id=your_id&response_type=code&foo=bar' @@ -87,10 +98,11 @@ class WebApplicationClient(Client): """ scope = self.scope if scope is None else scope return prepare_grant_uri(uri, self.client_id, 'code', - redirect_uri=redirect_uri, scope=scope, state=state, **kwargs) + redirect_uri=redirect_uri, scope=scope, state=state, code_challenge=code_challenge, + code_challenge_method=code_challenge_method, **kwargs) def prepare_request_body(self, code=None, redirect_uri=None, body='', - include_client_id=True, **kwargs): + include_client_id=True, code_verifier=None, **kwargs): """Prepare the access token request body. The client makes a request to the token endpoint by adding the @@ -113,6 +125,9 @@ class WebApplicationClient(Client): authorization server as described in `Section 3.2.1`_. :type include_client_id: Boolean + :param code_verifier: OPTIONAL. A cryptographically random string that is used to correlate the + authorization request to the token request. + :param kwargs: Extra parameters to include in the token request. In addition OAuthLib will add the ``grant_type`` parameter set to @@ -127,6 +142,8 @@ class WebApplicationClient(Client): >>> client = WebApplicationClient('your_id') >>> client.prepare_request_body(code='sh35ksdf09sf') 'grant_type=authorization_code&code=sh35ksdf09sf' + >>> client.prepare_request_body(code_verifier='KB46DCKJ873NCGXK5GD682NHDKK34GR') + 'grant_type=authorization_code&code_verifier=KB46DCKJ873NCGXK5GD682NHDKK34GR' >>> client.prepare_request_body(code='sh35ksdf09sf', foo='bar') 'grant_type=authorization_code&code=sh35ksdf09sf&foo=bar' @@ -154,7 +171,7 @@ class WebApplicationClient(Client): kwargs['client_id'] = self.client_id kwargs['include_client_id'] = include_client_id return prepare_token_request(self.grant_type, code=code, body=body, - redirect_uri=redirect_uri, **kwargs) + redirect_uri=redirect_uri, code_verifier=code_verifier, **kwargs) def parse_request_uri_response(self, uri, state=None): """Parse the URI query for code and state. 
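The PKCE helpers added above are meant to be used together: the client derives a verifier, sends the matching challenge with the authorization request, and later proves possession of the verifier at the token endpoint. A minimal sketch, not part of the patch itself (the endpoint URL and authorization code below are placeholders):

    from oauthlib.oauth2 import WebApplicationClient

    client = WebApplicationClient('your_id')
    verifier = client.create_code_verifier(length=64)   # 43 <= length <= 128
    challenge = client.create_code_challenge(verifier, code_challenge_method='S256')

    # The challenge travels with the authorization request...
    url = client.prepare_request_uri(
        'https://example.com/authorize',                # placeholder endpoint
        code_challenge=challenge, code_challenge_method='S256')

    # ...and the verifier is disclosed only in the token request.
    body = client.prepare_request_body(
        code='sh35ksdf09sf',                            # placeholder auth code
        code_verifier=verifier)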
diff --git a/lib/oauthlib/oauth2/rfc6749/endpoints/metadata.py b/lib/oauthlib/oauth2/rfc6749/endpoints/metadata.py index 81ee1def..d43a8247 100644 --- a/lib/oauthlib/oauth2/rfc6749/endpoints/metadata.py +++ b/lib/oauthlib/oauth2/rfc6749/endpoints/metadata.py @@ -54,7 +54,8 @@ class MetadataEndpoint(BaseEndpoint): """Create metadata response """ headers = { - 'Content-Type': 'application/json' + 'Content-Type': 'application/json', + 'Access-Control-Allow-Origin': '*', } return headers, json.dumps(self.claims), 200 diff --git a/lib/oauthlib/oauth2/rfc6749/errors.py b/lib/oauthlib/oauth2/rfc6749/errors.py index b01e247b..da24feab 100644 --- a/lib/oauthlib/oauth2/rfc6749/errors.py +++ b/lib/oauthlib/oauth2/rfc6749/errors.py @@ -103,15 +103,12 @@ class OAuth2Error(Exception): value "Bearer". This scheme MUST be followed by one or more auth-param values. """ - authvalues = [ - "Bearer", - 'error="{}"'.format(self.error) - ] + authvalues = ['error="{}"'.format(self.error)] if self.description: authvalues.append('error_description="{}"'.format(self.description)) if self.uri: authvalues.append('error_uri="{}"'.format(self.uri)) - return {"WWW-Authenticate": ", ".join(authvalues)} + return {"WWW-Authenticate": "Bearer " + ", ".join(authvalues)} return {} diff --git a/lib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py b/lib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py index bf42d889..b799823e 100644 --- a/lib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py +++ b/lib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.py @@ -10,6 +10,7 @@ import logging from oauthlib import common from .. import errors +from ..utils import is_secure_transport from .base import GrantTypeBase log = logging.getLogger(__name__) @@ -272,6 +273,8 @@ class AuthorizationCodeGrant(GrantTypeBase): grant = self.create_authorization_code(request) for modifier in self._code_modifiers: grant = modifier(grant, token_handler, request) + if 'access_token' in grant: + self.request_validator.save_token(grant, request) log.debug('Saving grant %r for %r.', grant, request) self.request_validator.save_authorization_code( request.client_id, grant, request) @@ -310,6 +313,7 @@ class AuthorizationCodeGrant(GrantTypeBase): self.request_validator.save_token(token, request) self.request_validator.invalidate_authorization_code( request.client_id, request.code, request) + headers.update(self._create_cors_headers(request)) return headers, json.dumps(token), 200 def validate_authorization_request(self, request): @@ -543,3 +547,20 @@ class AuthorizationCodeGrant(GrantTypeBase): if challenge_method in self._code_challenge_methods: return self._code_challenge_methods[challenge_method](verifier, challenge) raise NotImplementedError('Unknown challenge_method %s' % challenge_method) + + def _create_cors_headers(self, request): + """If CORS is allowed, create the appropriate headers.""" + if 'origin' not in request.headers: + return {} + + origin = request.headers['origin'] + if not is_secure_transport(origin): + log.debug('Origin "%s" is not HTTPS, CORS not allowed.', origin) + return {} + elif not self.request_validator.is_origin_allowed( + request.client_id, origin, request): + log.debug('Invalid origin "%s", CORS not allowed.', origin) + return {} + else: + log.debug('Valid origin "%s", injecting CORS headers.', origin) + return {'Access-Control-Allow-Origin': origin} diff --git a/lib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py b/lib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py index 
8698a3d5..f801de4a 100644
--- a/lib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py
+++ b/lib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.py
@@ -63,7 +63,7 @@ class RefreshTokenGrant(GrantTypeBase):
                 refresh_token=self.issue_new_refresh_tokens)
 
         for modifier in self._token_modifiers:
-            token = modifier(token)
+            token = modifier(token, token_handler, request)
 
         self.request_validator.save_token(token, request)
diff --git a/lib/oauthlib/oauth2/rfc6749/parameters.py b/lib/oauthlib/oauth2/rfc6749/parameters.py
index f07b8bd2..44738bb4 100644
--- a/lib/oauthlib/oauth2/rfc6749/parameters.py
+++ b/lib/oauthlib/oauth2/rfc6749/parameters.py
@@ -23,7 +23,7 @@ from .utils import is_secure_transport, list_to_scope, scope_to_list
 
 
 def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
-                      scope=None, state=None, **kwargs):
+                      scope=None, state=None, code_challenge=None, code_challenge_method='plain', **kwargs):
     """Prepare the authorization grant request URI.
 
     The client constructs the request URI by adding the following
@@ -45,6 +45,11 @@ def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
             back to the client. The parameter SHOULD be used for preventing
             cross-site request forgery as described in `Section 10.12`_.
+    :param code_challenge: PKCE parameter. A challenge derived from the
+                           code_verifier that is sent in the authorization
+                           request, to be verified against later.
+    :param code_challenge_method: PKCE parameter. A method that was used to derive the
+                                  code_challenge. Defaults to "plain" if not present in the request.
     :param kwargs: Extra arguments to embed in the grant/authorization URL.
 
     An example of an authorization code grant authorization URL:
 
     .. code-block:: http
 
         GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz
+        &code_challenge=kjasBS523KdkAILD2k78NdcJSk2k3KHG6&code_challenge_method=S256
         &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1
         Host: server.example.com
 
@@ -73,6 +79,9 @@ def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
         params.append(('scope', list_to_scope(scope)))
     if state:
         params.append(('state', state))
+    if code_challenge is not None:
+        params.append(('code_challenge', code_challenge))
+        params.append(('code_challenge_method', code_challenge_method))
 
     for k in kwargs:
         if kwargs[k]:
@@ -81,7 +90,7 @@ def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
     return add_params_to_uri(uri, params)
 
 
-def prepare_token_request(grant_type, body='', include_client_id=True, **kwargs):
+def prepare_token_request(grant_type, body='', include_client_id=True, code_verifier=None, **kwargs):
     """Prepare the access token request.
 
     The client makes a request to the token endpoint by adding the
@@ -116,6 +125,9 @@ def prepare_token_request(grant_type, body='', include_client_id=True, **kwargs)
             authorization request as described in `Section 4.1.1`_, and their
             values MUST be identical. *
 
+    :param code_verifier: PKCE parameter. A cryptographically random string that is used to correlate the
+                          authorization request to the token request.
+
     :param kwargs: Extra arguments to embed in the request body. 
Parameters marked with a `*` above are not explicit arguments in the
@@ -142,6 +154,10 @@ def prepare_token_request(grant_type, body='', include_client_id=True, **kwargs)
     if client_id is not None:
         params.append(('client_id', client_id))
 
+    # use code_verifier if code_challenge was passed in the authorization request
+    if code_verifier is not None:
+        params.append(('code_verifier', code_verifier))
+
     # the kwargs iteration below only supports including boolean truth (truthy)
     # values, but some servers may require an empty string for `client_secret`
     client_secret = kwargs.pop('client_secret', None)
diff --git a/lib/oauthlib/oauth2/rfc6749/request_validator.py b/lib/oauthlib/oauth2/rfc6749/request_validator.py
index 817d594b..610a708d 100644
--- a/lib/oauthlib/oauth2/rfc6749/request_validator.py
+++ b/lib/oauthlib/oauth2/rfc6749/request_validator.py
@@ -649,3 +649,28 @@ class RequestValidator:
 
         """
         raise NotImplementedError('Subclasses must implement this method.')
+
+    def is_origin_allowed(self, client_id, origin, request, *args, **kwargs):
+        """Indicate if the given origin is allowed to access the token endpoint
+        via Cross-Origin Resource Sharing (CORS). CORS is used by browser-based
+        clients, such as Single-Page Applications, to perform the Authorization
+        Code Grant.
+
+        (Note: If performing Authorization Code Grant via a public client such
+        as a browser, you should use PKCE as well.)
+
+        If this method returns true, the appropriate CORS headers will be added
+        to the response. By default this method always returns False, meaning
+        CORS is disabled.
+
+        :param client_id: Unicode client identifier.
+        :param origin: Unicode origin.
+        :param request: OAuthlib request.
+        :type request: oauthlib.common.Request
+        :rtype: bool
+
+        Method is used by:
+            - Authorization Code Grant
+
+        """
+        return False
diff --git a/lib/oauthlib/oauth2/rfc8628/__init__.py b/lib/oauthlib/oauth2/rfc8628/__init__.py
new file mode 100644
index 00000000..531929dc
--- /dev/null
+++ b/lib/oauthlib/oauth2/rfc8628/__init__.py
@@ -0,0 +1,10 @@
+"""
+oauthlib.oauth2.rfc8628
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for consuming and providing OAuth 2.0 Device Authorization RFC8628.
+"""
+import logging
+
+log = logging.getLogger(__name__)
diff --git a/lib/oauthlib/oauth2/rfc8628/clients/__init__.py b/lib/oauthlib/oauth2/rfc8628/clients/__init__.py
new file mode 100644
index 00000000..130b52e3
--- /dev/null
+++ b/lib/oauthlib/oauth2/rfc8628/clients/__init__.py
@@ -0,0 +1,8 @@
+"""
+oauthlib.oauth2.rfc8628
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for consuming OAuth 2.0 Device Authorization RFC8628.
+"""
+from .device import DeviceClient
diff --git a/lib/oauthlib/oauth2/rfc8628/clients/device.py b/lib/oauthlib/oauth2/rfc8628/clients/device.py
new file mode 100644
index 00000000..95c4f5a2
--- /dev/null
+++ b/lib/oauthlib/oauth2/rfc8628/clients/device.py
@@ -0,0 +1,94 @@
+"""
+oauthlib.oauth2.rfc8628
+~~~~~~~~~~~~~~~~~~~~~~~
+
+This module is an implementation of various logic needed
+for consuming and providing OAuth 2.0 Device Authorization RFC8628. 
+"""
+
+from oauthlib.oauth2 import BackendApplicationClient, Client
+from oauthlib.oauth2.rfc6749.errors import InsecureTransportError
+from oauthlib.oauth2.rfc6749.parameters import prepare_token_request
+from oauthlib.oauth2.rfc6749.utils import is_secure_transport, list_to_scope
+from oauthlib.common import add_params_to_uri
+
+
+class DeviceClient(Client):
+
+    """A public client utilizing the device authorization workflow.
+
+    The client can request an access token using a device code and
+    a public client id associated with the device code as defined
+    in RFC8628.
+
+    The device authorization grant type can be used to obtain both
+    access tokens and refresh tokens and is intended to be used in
+    a scenario where the device being authorized does not have a
+    user interface that is suitable for performing authentication.
+    """
+
+    grant_type = 'urn:ietf:params:oauth:grant-type:device_code'
+
+    def __init__(self, client_id, **kwargs):
+        super().__init__(client_id, **kwargs)
+        self.client_secret = kwargs.get('client_secret')
+
+    def prepare_request_uri(self, uri, scope=None, **kwargs):
+        if not is_secure_transport(uri):
+            raise InsecureTransportError()
+
+        scope = self.scope if scope is None else scope
+        params = [(('client_id', self.client_id)), (('grant_type', self.grant_type))]
+
+        if self.client_secret is not None:
+            params.append(('client_secret', self.client_secret))
+
+        if scope:
+            params.append(('scope', list_to_scope(scope)))
+
+        for k in kwargs:
+            if kwargs[k]:
+                params.append((str(k), kwargs[k]))
+
+        return add_params_to_uri(uri, params)
+
+    def prepare_request_body(self, device_code, body='', scope=None,
+                             include_client_id=False, **kwargs):
+        """Add device_code to request body
+
+        The client makes a request to the token endpoint by adding the
+        device_code as a parameter using the
+        "application/x-www-form-urlencoded" format to the HTTP request
+        body.
+
+        :param body: Existing request body (URL encoded string) to embed parameters
+                     into. This may contain extra parameters. Default ''.
+        :param scope: The scope of the access request as described by
+                      `Section 3.3`_.
+
+        :param include_client_id: `True` to send the `client_id` in the
+                                  body of the upstream request. This is required
+                                  if the client is not authenticating with the
+                                  authorization server as described in
+                                  `Section 3.2.1`_. False otherwise (default).
+        :type include_client_id: Boolean
+
+        :param kwargs: Extra credentials to include in the token request.
+
+        The prepared body will include the provided device_code as well as
+        the ``grant_type`` parameter set to
+        ``urn:ietf:params:oauth:grant-type:device_code``::
+
+            >>> from oauthlib.oauth2 import DeviceClient
+            >>> client = DeviceClient('your_id')
+            >>> client.prepare_request_body(device_code='your_code', scope=['hello', 'world'])
+            'grant_type=urn:ietf:params:oauth:grant-type:device_code&device_code=your_code&scope=hello+world'
+
+        .. 
_`Section 3.4`: https://datatracker.ietf.org/doc/html/rfc8628#section-3.4
+        """
+
+        kwargs['client_id'] = self.client_id
+        kwargs['include_client_id'] = include_client_id
+        scope = self.scope if scope is None else scope
+        return prepare_token_request(self.grant_type, body=body, device_code=device_code,
+                                     scope=scope, **kwargs)
diff --git a/lib/oauthlib/openid/connect/core/grant_types/__init__.py b/lib/oauthlib/openid/connect/core/grant_types/__init__.py
index 887a5850..8dad5f60 100644
--- a/lib/oauthlib/openid/connect/core/grant_types/__init__.py
+++ b/lib/oauthlib/openid/connect/core/grant_types/__init__.py
@@ -10,3 +10,4 @@ from .dispatchers import (
 )
 from .hybrid import HybridGrant
 from .implicit import ImplicitGrant
+from .refresh_token import RefreshTokenGrant
diff --git a/lib/oauthlib/openid/connect/core/grant_types/refresh_token.py b/lib/oauthlib/openid/connect/core/grant_types/refresh_token.py
new file mode 100644
index 00000000..43e4499c
--- /dev/null
+++ b/lib/oauthlib/openid/connect/core/grant_types/refresh_token.py
@@ -0,0 +1,34 @@
+"""
+oauthlib.openid.connect.core.grant_types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+"""
+import logging
+
+from oauthlib.oauth2.rfc6749.grant_types.refresh_token import (
+    RefreshTokenGrant as OAuth2RefreshTokenGrant,
+)
+
+from .base import GrantTypeBase
+
+log = logging.getLogger(__name__)
+
+
+class RefreshTokenGrant(GrantTypeBase):
+
+    def __init__(self, request_validator=None, **kwargs):
+        self.proxy_target = OAuth2RefreshTokenGrant(
+            request_validator=request_validator, **kwargs)
+        self.register_token_modifier(self.add_id_token)
+
+    def add_id_token(self, token, token_handler, request):
+        """
+        Construct an initial version of id_token, and let the
+        request_validator sign or encrypt it.
+
+        The authorization_code version of this method is used to
+        retrieve the nonce according to the code storage.
+        """
+        if not self.request_validator.refresh_id_token(request):
+            return token
+
+        return super().add_id_token(token, token_handler, request)
diff --git a/lib/oauthlib/openid/connect/core/request_validator.py b/lib/oauthlib/openid/connect/core/request_validator.py
index e8f334b0..47c4cd94 100644
--- a/lib/oauthlib/openid/connect/core/request_validator.py
+++ b/lib/oauthlib/openid/connect/core/request_validator.py
@@ -306,3 +306,15 @@ class RequestValidator(OAuth2RequestValidator):
 
         Method is used by:
             UserInfoEndpoint
         """
+
+    def refresh_id_token(self, request):
+        """Whether the id token should be refreshed. Defaults to True.
+
+        :param request: OAuthlib request. 
+ :type request: oauthlib.common.Request + :rtype: True or False + + Method is used by: + RefreshTokenGrant + """ + return True diff --git a/lib/requests_oauthlib/__init__.py b/lib/requests_oauthlib/__init__.py index a4e03a4e..0d3e49f9 100644 --- a/lib/requests_oauthlib/__init__.py +++ b/lib/requests_oauthlib/__init__.py @@ -5,7 +5,7 @@ from .oauth1_session import OAuth1Session from .oauth2_auth import OAuth2 from .oauth2_session import OAuth2Session, TokenUpdated -__version__ = "1.3.0" +__version__ = "1.3.1" import requests diff --git a/lib/requests_oauthlib/compliance_fixes/__init__.py b/lib/requests_oauthlib/compliance_fixes/__init__.py index 02fa5120..0e8e3ac8 100644 --- a/lib/requests_oauthlib/compliance_fixes/__init__.py +++ b/lib/requests_oauthlib/compliance_fixes/__init__.py @@ -2,9 +2,9 @@ from __future__ import absolute_import from .facebook import facebook_compliance_fix from .fitbit import fitbit_compliance_fix -from .linkedin import linkedin_compliance_fix from .slack import slack_compliance_fix from .instagram import instagram_compliance_fix from .mailchimp import mailchimp_compliance_fix from .weibo import weibo_compliance_fix from .plentymarkets import plentymarkets_compliance_fix +from .ebay import ebay_compliance_fix diff --git a/lib/requests_oauthlib/compliance_fixes/ebay.py b/lib/requests_oauthlib/compliance_fixes/ebay.py new file mode 100644 index 00000000..4aa423b3 --- /dev/null +++ b/lib/requests_oauthlib/compliance_fixes/ebay.py @@ -0,0 +1,23 @@ +import json +from oauthlib.common import to_unicode + + +def ebay_compliance_fix(session): + def _compliance_fix(response): + token = json.loads(response.text) + + # eBay responds with non-compliant token types. + # https://developer.ebay.com/api-docs/static/oauth-client-credentials-grant.html + # https://developer.ebay.com/api-docs/static/oauth-auth-code-grant-request.html + # Modify these to be "Bearer". 
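+        # For example, a hypothetical eBay token response of
+        #     {"access_token": "v1.abc...", "token_type": "User Access Token"}
+        # is rewritten below so that token_type becomes "Bearer",
+        # which is the type oauthlib expects.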
+ if token.get("token_type") in ["Application Access Token", "User Access Token"]: + token["token_type"] = "Bearer" + fixed_token = json.dumps(token) + response._content = to_unicode(fixed_token).encode("utf-8") + + return response + + session.register_compliance_hook("access_token_response", _compliance_fix) + session.register_compliance_hook("refresh_token_response", _compliance_fix) + + return session diff --git a/lib/requests_oauthlib/compliance_fixes/linkedin.py b/lib/requests_oauthlib/compliance_fixes/linkedin.py deleted file mode 100644 index cd5b4ace..00000000 --- a/lib/requests_oauthlib/compliance_fixes/linkedin.py +++ /dev/null @@ -1,21 +0,0 @@ -from json import loads, dumps - -from oauthlib.common import add_params_to_uri, to_unicode - - -def linkedin_compliance_fix(session): - def _missing_token_type(r): - token = loads(r.text) - token["token_type"] = "Bearer" - r._content = to_unicode(dumps(token)).encode("UTF-8") - return r - - def _non_compliant_param_name(url, headers, data): - token = [("oauth2_access_token", session.access_token)] - url = add_params_to_uri(url, token) - return url, headers, data - - session._client.default_token_placement = "query" - session.register_compliance_hook("access_token_response", _missing_token_type) - session.register_compliance_hook("protected_request", _non_compliant_param_name) - return session diff --git a/lib/requests_oauthlib/oauth1_session.py b/lib/requests_oauthlib/oauth1_session.py index aa17f28f..88f2853c 100644 --- a/lib/requests_oauthlib/oauth1_session.py +++ b/lib/requests_oauthlib/oauth1_session.py @@ -268,7 +268,7 @@ class OAuth1Session(requests.Session): :param url: The request token endpoint URL. :param realm: A list of realms to request access to. :param \*\*request_kwargs: Optional arguments passed to ''post'' - function in ''requests.Session'' + function in ''requests.Session'' :returns: The response in dict format. Note that a previously set callback_uri will be reset for your diff --git a/lib/requests_oauthlib/oauth2_session.py b/lib/requests_oauthlib/oauth2_session.py index eea4ac6f..db446808 100644 --- a/lib/requests_oauthlib/oauth2_session.py +++ b/lib/requests_oauthlib/oauth2_session.py @@ -189,6 +189,7 @@ class OAuth2Session(requests.Session): proxies=None, include_client_id=None, client_secret=None, + cert=None, **kwargs ): """Generic method for fetching an access token from the token endpoint. @@ -229,6 +230,10 @@ class OAuth2Session(requests.Session): `auth` tuple. If the value is `None`, it will be omitted from the request, however if the value is an empty string, an empty string will be sent. + :param cert: Client certificate to send for OAuth 2.0 Mutual-TLS Client + Authentication (draft-ietf-oauth-mtls). Can either be the + path of a file containing the private key and certificate or + a tuple of two filenames for certificate and key. :param kwargs: Extra parameters to include in the token request. 
:return: A token dict """ @@ -341,6 +346,7 @@ class OAuth2Session(requests.Session): auth=auth, verify=verify, proxies=proxies, + cert=cert, **request_kwargs ) diff --git a/requirements.txt b/requirements.txt index 6388dc9f..25a4ecf8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,7 +24,6 @@ IPy==1.01 Mako==1.1.6 MarkupSafe==2.0.1 musicbrainzngs==0.7.1 -oauthlib==3.1.1 packaging==21.3 paho-mqtt==1.6.1 plexapi==4.9.1 @@ -36,7 +35,7 @@ python-dateutil==2.8.2 python-twitter==3.5 pytz==2021.3 requests==2.27.1 -requests-oauthlib==1.3.0 +requests-oauthlib==1.3.1 rumps==0.3.0; platform_system == "Darwin" simplejson==3.17.6 six==1.16.0 From 9c3d7005845f4de7a2df373e6f4ae552a749e899 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 21:58:10 -0800 Subject: [PATCH 014/743] Bump tempora from 5.0.0 to 5.0.1 (#1642) Bumps [tempora](https://github.com/jaraco/tempora) from 5.0.0 to 5.0.1. - [Release notes](https://github.com/jaraco/tempora/releases) - [Changelog](https://github.com/jaraco/tempora/blob/main/CHANGES.rst) - [Commits](https://github.com/jaraco/tempora/compare/v5.0.0...v5.0.1) --- updated-dependencies: - dependency-name: tempora dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> [skip ci] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 25a4ecf8..5ec7d931 100644 --- a/requirements.txt +++ b/requirements.txt @@ -40,7 +40,7 @@ rumps==0.3.0; platform_system == "Darwin" simplejson==3.17.6 six==1.16.0 soupsieve==2.3.1 -tempora==5.0.0 +tempora==5.0.1 tokenize-rt==4.2.1 tzdata==2021.5 tzlocal==2.1 # apscheduler==3.8.0 requires tzlocal~=2.0 From 44e65952f9fa969ceccf7102a40019ae4f933b75 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 21:58:28 -0800 Subject: [PATCH 015/743] Bump cloudinary from 1.28.1 to 1.29.0 (#1643) * Bump cloudinary from 1.28.1 to 1.29.0 Bumps [cloudinary](https://github.com/cloudinary/pycloudinary) from 1.28.1 to 1.29.0. - [Release notes](https://github.com/cloudinary/pycloudinary/releases) - [Changelog](https://github.com/cloudinary/pycloudinary/blob/master/CHANGELOG.md) - [Commits](https://github.com/cloudinary/pycloudinary/compare/1.28.1...1.29.0) --- updated-dependencies: - dependency-name: cloudinary dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot]

* Update cloudinary==1.29.0

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
---
 lib/cloudinary/__init__.py                    |   2 +-
 .../api_client/tcp_keep_alive_manager.py      | 119 ++++++++++++++++++
 lib/cloudinary/uploader.py                    |  32 ++++-
 lib/cloudinary/utils.py                       |  15 ++-
 requirements.txt                              |   2 +-
 5 files changed, 163 insertions(+), 7 deletions(-)
 create mode 100644 lib/cloudinary/api_client/tcp_keep_alive_manager.py

diff --git a/lib/cloudinary/__init__.py b/lib/cloudinary/__init__.py
index 56e7b5b7..5e315049 100644
--- a/lib/cloudinary/__init__.py
+++ b/lib/cloudinary/__init__.py
@@ -38,7 +38,7 @@ CL_BLANK = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAA
 URI_SCHEME = "cloudinary"
 API_VERSION = "v1_1"
 
-VERSION = "1.28.1"
+VERSION = "1.29.0"
 
 USER_AGENT = "CloudinaryPython/{} (Python {})".format(VERSION, python_version())
 """ :const: USER_AGENT """
diff --git a/lib/cloudinary/api_client/tcp_keep_alive_manager.py b/lib/cloudinary/api_client/tcp_keep_alive_manager.py
new file mode 100644
index 00000000..b2c7b75f
--- /dev/null
+++ b/lib/cloudinary/api_client/tcp_keep_alive_manager.py
@@ -0,0 +1,119 @@
+import socket
+import sys
+
+from urllib3 import HTTPSConnectionPool, HTTPConnectionPool, PoolManager, ProxyManager
+
+# Inspired by:
+# https://github.com/finbourne/lusid-sdk-python/blob/b813882e4f1777ea78670a03a7596486639e6f40/sdk/lusid/tcp/tcp_keep_alive_probes.py
+
+# The content to send on Mac OS in the TCP Keep Alive probe
+TCP_KEEPALIVE = 0x10
+# The maximum time to keep the connection idle before sending probes
+TCP_KEEP_IDLE = 60
+# The interval between probes
+TCP_KEEPALIVE_INTERVAL = 60
+# The maximum number of failed probes before terminating the connection
+TCP_KEEP_CNT = 3
+
+
+class TCPKeepAliveValidationMethods:
+    """
+    This class contains a single method whose sole purpose is to set up TCP Keep Alive probes on the socket for a
+    connection. This is necessary for long-running requests, which will be silently terminated by the AWS Network
+    Load Balancer, which kills a connection if it is idle for more than 350 seconds.
+    """
+
+    @staticmethod
+    def adjust_connection_socket(conn, protocol="https"):
+        """
+        Adjusts the socket settings so that the client sends a TCP keep alive probe over the connection. This is
+        only applied where possible; if the ability to set the socket options is not available (for example, when
+        using Anaconda), the settings will be left as is.
+        :param conn: The connection to update the socket settings for
+        :param str protocol: The protocol of the connection
+        :return: None
+        """
+
+        if protocol == "http":
+            # It isn't clear how to set this up over HTTP; it seems to differ from HTTPS
+            return
+
+        # TCP Keep Alive Probes for different platforms
+        platform = sys.platform
+        # TCP Keep Alive Probes for Linux
+        if (platform == 'linux' and hasattr(conn.sock, "setsockopt") and hasattr(socket, "SO_KEEPALIVE") and
+                hasattr(socket, "TCP_KEEPIDLE") and hasattr(socket, "TCP_KEEPINTVL") and hasattr(socket,
+                                                                                                 "TCP_KEEPCNT")):
+            conn.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+            conn.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, TCP_KEEP_IDLE)
+            conn.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, TCP_KEEPALIVE_INTERVAL)
+            conn.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, TCP_KEEP_CNT)
+
+        # TCP Keep Alive Probes for Windows OS
+        elif platform == 'win32' and hasattr(socket, "SIO_KEEPALIVE_VALS") and getattr(conn.sock, "ioctl",
+                                                                                       None) is not None:
+            conn.sock.ioctl(socket.SIO_KEEPALIVE_VALS, (1, TCP_KEEP_IDLE * 1000, TCP_KEEPALIVE_INTERVAL * 1000))
+
+        # TCP Keep Alive Probes for Mac OS
+        elif platform == 'darwin' and getattr(conn.sock, "setsockopt", None) is not None:
+            conn.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+            conn.sock.setsockopt(socket.IPPROTO_TCP, TCP_KEEPALIVE, TCP_KEEPALIVE_INTERVAL)
+
+
+class TCPKeepAliveHTTPSConnectionPool(HTTPSConnectionPool):
+    """
+    This class overrides the _validate_conn method in the HTTPSConnectionPool class. This is the entry point to use
+    for modifying the socket, as it is called after the socket is created and before the request is made.
+    """
+
+    def _validate_conn(self, conn):
+        """
+        Called right before a request is made, after the socket is created.
+        """
+        # Call the method on the base class
+        super(TCPKeepAliveHTTPSConnectionPool, self)._validate_conn(conn)
+
+        # Set up TCP Keep Alive probes; this is the only line added to this function
+        TCPKeepAliveValidationMethods.adjust_connection_socket(conn, "https")
+
+
+class TCPKeepAliveHTTPConnectionPool(HTTPConnectionPool):
+    """
+    This class overrides the _validate_conn method in the HTTPConnectionPool class. This is the entry point to use
+    for modifying the socket, as it is called after the socket is created and before the request is made.
+    In the base class this method is simply a no-op.
+    """
+
+    def _validate_conn(self, conn):
+        """
+        Called right before a request is made, after the socket is created.
+        """
+        # Call the method on the base class
+        super(TCPKeepAliveHTTPConnectionPool, self)._validate_conn(conn)
+
+        # Set up TCP Keep Alive probes; this is the only line added to this function
+        TCPKeepAliveValidationMethods.adjust_connection_socket(conn, "http")
+
+
+class TCPKeepAlivePoolManager(PoolManager):
+    """
+    This Pool Manager has only had the pool_classes_by_scheme variable changed. This now points at the TCPKeepAlive
+    connection pools rather than the default connection pools.
+    """
+
+    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+        super(TCPKeepAlivePoolManager, self).__init__(num_pools=num_pools, headers=headers, **connection_pool_kw)
+        self.pool_classes_by_scheme = {"http": TCPKeepAliveHTTPConnectionPool, "https": TCPKeepAliveHTTPSConnectionPool}
+
+
+class TCPKeepAliveProxyManager(ProxyManager):
+    """
+    This Proxy Manager has only had the pool_classes_by_scheme variable changed. This now points at the TCPKeepAlive
+    connection pools rather than the default connection pools.
+    """
+
+    def __init__(self, proxy_url, num_pools=10, headers=None, proxy_headers=None, **connection_pool_kw):
+        super(TCPKeepAliveProxyManager, self).__init__(proxy_url=proxy_url, num_pools=num_pools, headers=headers,
+                                                       proxy_headers=proxy_headers,
+                                                       **connection_pool_kw)
+        self.pool_classes_by_scheme = {"http": TCPKeepAliveHTTPConnectionPool, "https": TCPKeepAliveHTTPSConnectionPool}
diff --git a/lib/cloudinary/uploader.py b/lib/cloudinary/uploader.py
index 3b1c63b3..3b118142 100644
--- a/lib/cloudinary/uploader.py
+++ b/lib/cloudinary/uploader.py
@@ -277,18 +277,48 @@ def explode(public_id, **options):
     return call_api("explode", params, **options)
 
 
-# options may include 'exclusive' (boolean) which causes clearing this tag from all other resources
 def add_tag(tag, public_ids=None, **options):
+    """
+    Adds a single tag, a list of tags, or a comma-separated list of tags to the assets.
+
+    :param tag: The tag or tags to assign. Can specify multiple tags in a single string,
+                separated by commas - "t1,t2,t3" or list of tags - ["t1","t2","t3"].
+    :param public_ids: A list of public IDs (up to 1000).
+    :param options: Configuration options may include 'exclusive' (boolean) which causes
+                    clearing this tag from all other assets.
+
+    :return: Dictionary with a list of public IDs that were updated.
+    """
     exclusive = options.pop("exclusive", None)
     command = "set_exclusive" if exclusive else "add"
     return call_tags_api(tag, command, public_ids, **options)
 
 
 def remove_tag(tag, public_ids=None, **options):
+    """
+    Removes a single tag, a list of tags, or a comma-separated list of tags from the assets.
+
+    :param tag: The tag or tags to remove. Can specify multiple tags in a single string,
+                separated by commas - "t1,t2,t3" or list of tags - ["t1","t2","t3"].
+    :param public_ids: A list of public IDs (up to 1000).
+    :param options: Additional options.
+
+    :return: Dictionary with a list of public IDs that were updated.
+    """
     return call_tags_api(tag, "remove", public_ids, **options)
 
 
 def replace_tag(tag, public_ids=None, **options):
+    """
+    Replaces all existing tags with a single tag, a list of tags, or a comma-separated list of tags on the assets.
+
+    :param tag: The tag or tags to assign. Can specify multiple tags in a single string,
+                separated by commas - "t1,t2,t3" or list of tags - ["t1","t2","t3"].
+    :param public_ids: A list of public IDs (up to 1000).
+    :param options: Additional options.
+
+    :return: Dictionary with a list of public IDs that were updated. 
+ """ return call_tags_api(tag, "replace", public_ids, **options) diff --git a/lib/cloudinary/utils.py b/lib/cloudinary/utils.py index f936b845..1fade15b 100644 --- a/lib/cloudinary/utils.py +++ b/lib/cloudinary/utils.py @@ -17,11 +17,12 @@ from fractions import Fraction from numbers import Number import six.moves.urllib.parse -from six import iteritems, string_types +from six import iteritems from urllib3 import ProxyManager, PoolManager import cloudinary from cloudinary import auth_token +from cloudinary.api_client.tcp_keep_alive_manager import TCPKeepAlivePoolManager, TCPKeepAliveProxyManager from cloudinary.compat import PY3, to_bytes, to_bytearray, to_string, string_types, urlparse VAR_NAME_RE = r'(\$\([a-zA-Z]\w+\))' @@ -1509,7 +1510,7 @@ def verify_notification_signature(body, timestamp, signature, valid_for=7200, al def get_http_connector(conf, options): """ - Used to create http connector, depends on api_proxy configuration parameter + Used to create http connector, depends on api_proxy and disable_tcp_keep_alive configuration parameters. :param conf: configuration object :param options: additional options @@ -1517,10 +1518,16 @@ def get_http_connector(conf, options): :return: ProxyManager if api_proxy is set, otherwise PoolManager object """ if conf.api_proxy: - return ProxyManager(conf.api_proxy, **options) - else: + if conf.disable_tcp_keep_alive: + return ProxyManager(conf.api_proxy, **options) + + return TCPKeepAliveProxyManager(conf.api_proxy, **options) + + if conf.disable_tcp_keep_alive: return PoolManager(**options) + return TCPKeepAlivePoolManager(**options) + def encode_list(obj): if isinstance(obj, list): diff --git a/requirements.txt b/requirements.txt index 5ec7d931..ed3dc9c5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ bleach==4.1.0 certifi==2021.10.8 cheroot==8.6.0 cherrypy==18.6.1 -cloudinary==1.28.1 +cloudinary==1.29.0 distro==1.6.0 dnspython==2.2.0 facebook-sdk==3.1.0 From 9a9c98d0a0e95d80fe27ddf02ecdc19cfdc96f95 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 21:58:50 -0800 Subject: [PATCH 016/743] Bump plexapi from 4.9.1 to 4.9.2 (#1647) * Bump plexapi from 4.9.1 to 4.9.2 Bumps [plexapi](https://github.com/pkkid/python-plexapi) from 4.9.1 to 4.9.2. - [Release notes](https://github.com/pkkid/python-plexapi/releases) - [Commits](https://github.com/pkkid/python-plexapi/compare/4.9.1...4.9.2) --- updated-dependencies: - dependency-name: plexapi dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Update plexapi==4.9.2 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> --- lib/plexapi/audio.py | 2 +- lib/plexapi/const.py | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/plexapi/audio.py b/lib/plexapi/audio.py index 60b119d9..bef951d8 100644 --- a/lib/plexapi/audio.py +++ b/lib/plexapi/audio.py @@ -177,7 +177,7 @@ class Artist(Audio, AdvancedSettingsMixin, ArtMixin, PosterMixin, RatingMixin, S def albums(self, **kwargs): """ Returns a list of :class:`~plexapi.audio.Album` objects by the artist. 
""" - key = '/library/metadata/%s/children' % self.ratingKey + key = f"/library/sections/{self.librarySectionID}/all?artist.id={self.ratingKey}&type=9" return self.fetchItems(key, Album, **kwargs) def track(self, title=None, album=None, track=None): diff --git a/lib/plexapi/const.py b/lib/plexapi/const.py index dc8b5693..72ad282f 100644 --- a/lib/plexapi/const.py +++ b/lib/plexapi/const.py @@ -4,6 +4,6 @@ # Library version MAJOR_VERSION = 4 MINOR_VERSION = 9 -PATCH_VERSION = 1 +PATCH_VERSION = 2 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__ = f"{__short_version__}.{PATCH_VERSION}" diff --git a/requirements.txt b/requirements.txt index ed3dc9c5..9c3cc11a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -26,7 +26,7 @@ MarkupSafe==2.0.1 musicbrainzngs==0.7.1 packaging==21.3 paho-mqtt==1.6.1 -plexapi==4.9.1 +plexapi==4.9.2 portend==3.1.0 profilehooks==1.12.0 PyJWT==2.3.0 From 8170244812fc89edfc8ae6c66d91f9824af7bedf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 22:21:23 -0800 Subject: [PATCH 017/743] Bump pyopenssl from 21.0.0 to 22.0.0 (#1638) Bumps [pyopenssl](https://github.com/pyca/pyopenssl) from 21.0.0 to 22.0.0. - [Release notes](https://github.com/pyca/pyopenssl/releases) - [Changelog](https://github.com/pyca/pyopenssl/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/pyopenssl/compare/21.0.0...22.0.0) --- updated-dependencies: - dependency-name: pyopenssl dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> [skip ci] --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index 58a47c02..2c1cc15f 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -1,7 +1,7 @@ apscheduler==3.8.0 importlib-resources==5.4.0 pyinstaller==4.8 -pyopenssl==21.0.0 +pyopenssl==22.0.0 pycryptodomex==3.13.0 pyobjc-framework-Cocoa==8.2; platform_system == "Darwin" From 922ab07ff1185c1fefb8cd2635f3ca0ee741f1ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 22:22:00 -0800 Subject: [PATCH 018/743] Bump pycryptodomex from 3.13.0 to 3.14.1 (#1646) Bumps [pycryptodomex](https://github.com/Legrandin/pycryptodome) from 3.13.0 to 3.14.1. - [Release notes](https://github.com/Legrandin/pycryptodome/releases) - [Changelog](https://github.com/Legrandin/pycryptodome/blob/master/Changelog.rst) - [Commits](https://github.com/Legrandin/pycryptodome/compare/v3.13.0...v3.14.1) --- updated-dependencies: - dependency-name: pycryptodomex dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> [skip ci] --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index 2c1cc15f..8ff19236 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -2,7 +2,7 @@ apscheduler==3.8.0 importlib-resources==5.4.0 pyinstaller==4.8 pyopenssl==22.0.0 -pycryptodomex==3.13.0 +pycryptodomex==3.14.1 pyobjc-framework-Cocoa==8.2; platform_system == "Darwin" pyobjc-core==8.2; platform_system == "Darwin" From 97ef7c49816242207776cc9a521c1f2805e89bc2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Feb 2022 22:23:02 -0800 Subject: [PATCH 019/743] Bump pyinstaller from 4.8 to 4.9 (#1644) Bumps [pyinstaller](https://github.com/pyinstaller/pyinstaller) from 4.8 to 4.9. - [Release notes](https://github.com/pyinstaller/pyinstaller/releases) - [Changelog](https://github.com/pyinstaller/pyinstaller/blob/v4.9/doc/CHANGES.rst) - [Commits](https://github.com/pyinstaller/pyinstaller/compare/v4.8...v4.9) --- updated-dependencies: - dependency-name: pyinstaller dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package/requirements-package.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package/requirements-package.txt b/package/requirements-package.txt index 8ff19236..0a291e78 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -1,6 +1,6 @@ apscheduler==3.8.0 importlib-resources==5.4.0 -pyinstaller==4.8 +pyinstaller==4.9 pyopenssl==22.0.0 pycryptodomex==3.14.1 From 31aa9965d130402bb4bb8aeb36cdbcde68f23963 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 7 Feb 2022 22:46:25 -0800 Subject: [PATCH 020/743] Fix screen rotation in PWA * Fixes #1633 --- data/interfaces/default/base.html | 2 +- data/interfaces/default/images/favicon/manifest.json | 3 +-- data/interfaces/default/login.html | 2 +- data/interfaces/default/welcome.html | 2 +- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/data/interfaces/default/base.html b/data/interfaces/default/base.html index 2fcb36c3..5e8d4447 100644 --- a/data/interfaces/default/base.html +++ b/data/interfaces/default/base.html @@ -30,7 +30,7 @@ - + diff --git a/data/interfaces/default/images/favicon/manifest.json b/data/interfaces/default/images/favicon/manifest.json index cf995208..7ed483cc 100644 --- a/data/interfaces/default/images/favicon/manifest.json +++ b/data/interfaces/default/images/favicon/manifest.json @@ -18,6 +18,5 @@ ], "theme_color": "#282a2d", "background_color": "#282a2d", - "display": "standalone", - "orientation": "any" + "display": "standalone" } \ No newline at end of file diff --git a/data/interfaces/default/login.html b/data/interfaces/default/login.html index 71a355d4..63b48deb 100644 --- a/data/interfaces/default/login.html +++ b/data/interfaces/default/login.html @@ -24,7 +24,7 @@ - + diff --git a/data/interfaces/default/welcome.html b/data/interfaces/default/welcome.html index 7c406e79..8f220da7 100644 --- a/data/interfaces/default/welcome.html +++ b/data/interfaces/default/welcome.html @@ -27,7 +27,7 @@ - + From 17800df0aa823e9ade07b479df0369a7a2a5f674 Mon Sep 17 00:00:00 
2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 7 Feb 2022 22:59:20 -0800 Subject: [PATCH 021/743] v2.9.0 --- CHANGELOG.md | 10 +++++++++- plexpy/version.py | 4 ++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59ff7ab1..a62f6980 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,8 +2,15 @@ ## v2.9.0-beta (2022-01-25) +* Notification: + * New: Added track disc number notification parameter. + * Change: Default Telegram messages to MarkdownV2 when HTML is disabled. (#1635) +* Exporter: + * Fix: Images not being included in export zip file download. * UI: * Fix: Favicon missing from the newsletter authentication page. + * Fix: IPv6 details not being shown in IP address modal. (#1629) + * Fix: PWA not respecting device rotation settings. (#1633) * New: Added intermediary login page to the Plex XML shortcuts. * New: Added setting to mask usernames in logs (enabled by default). * New: Added location, secure connection, and Plex Relay details to IP address modal. @@ -12,9 +19,10 @@ * API: * New: Added get_tautulli_info API command. * New: Added location, secure, and relayed to get_history API response. - * Change: Null pms_token and jwt_token in the response of the get_settings API command. + * Change: Null pms_token and jwt_token in the response of the get_settings API command. (#1616) * Other: * Fix: Better validation of config when saving settings. + * Fix: Correct section_id and prevent rating_key collisions when updating metadata. (#1640) * Change: Proxy Plex token check and Plex downloads json through the Tautulli server. * Change: Remove tokens from downloaded database and config files. * Change: Do not import pms_token or jwt_secret when importing a config file. diff --git a/plexpy/version.py b/plexpy/version.py index cef5088a..f70608b8 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -17,5 +17,5 @@ from __future__ import unicode_literals -PLEXPY_BRANCH = "beta" -PLEXPY_RELEASE_VERSION = "v2.9.0-beta" +PLEXPY_BRANCH = "master" +PLEXPY_RELEASE_VERSION = "v2.9.0" From e970a260a1e93f04d7532f8e75f31af33c29dbbb Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 7 Feb 2022 23:18:18 -0800 Subject: [PATCH 022/743] v2.9.1 --- CHANGELOG.md | 8 +++++++- plexpy/version.py | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a62f6980..4bd29a91 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,12 @@ # Changelog -## v2.9.0-beta (2022-01-25) +## v2.9.1 (2022-02-07) + +* Other: + * Fix: Incorrect changelog version number and date. + + +## v2.9.0 (2022-02-07) * Notification: * New: Added track disc number notification parameter. 
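The v2.9.0 changelog above notes that pms_token and jwt_token are nulled in the get_settings API response (#1616). A minimal sketch of that kind of redaction, assuming a plain settings dict; the helper name and exact key set are illustrative, not Tautulli's actual implementation:

# Illustrative sketch only: the key names come from the changelog entry,
# but this helper is not Tautulli's actual code.
SENSITIVE_KEYS = {'pms_token', 'jwt_token'}

def redact_settings(settings):
    # Keep every key so API consumers see a stable schema, but null
    # out the values that would leak credentials.
    return {key: (None if key in SENSITIVE_KEYS else value)
            for key, value in settings.items()}

print(redact_settings({'pms_token': 'abc123', 'http_port': 8181}))
# {'pms_token': None, 'http_port': 8181}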
diff --git a/plexpy/version.py b/plexpy/version.py index f70608b8..5a6de435 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.9.0" +PLEXPY_RELEASE_VERSION = "v2.9.1" From 088bbd3bb635541a36247a740ecc59e183995297 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 7 Feb 2022 23:36:03 -0800 Subject: [PATCH 023/743] Skip Local user when masking logs --- plexpy/logger.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plexpy/logger.py b/plexpy/logger.py index 53f18662..dd2e581a 100644 --- a/plexpy/logger.py +++ b/plexpy/logger.py @@ -127,6 +127,9 @@ class UsernameFilter(logging.Filter): username = item['username'] friendly_name = item['friendly_name'] + if username == 'Local': + continue + try: record.msg = self.replace(record.msg, username) record.msg = self.replace(record.msg, friendly_name) From 32ef43fe6e5f91c5012df1cb180870723105e1ad Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 30 Jan 2022 16:50:23 -0800 Subject: [PATCH 024/743] Revert "Default Telegram to MarkdownV2 when HTML disabled" This reverts commit b9f04ac71ae8a67e1fc37534cc8d1cf85d8af38b. Ref #1635 --- plexpy/notifiers.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index 5eae6b55..1c8bf0fb 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -3869,7 +3869,7 @@ class TELEGRAM(Notifier): 'chat_id': '', 'disable_web_preview': 0, 'silent_notification': 0, - 'html_support': 0, + 'html_support': 1, 'incl_subject': 1, 'incl_poster': 0 } @@ -3884,8 +3884,6 @@ class TELEGRAM(Notifier): if self.config['html_support']: data['parse_mode'] = 'HTML' - else: - data['parse_mode'] = 'MarkdownV2' if self.config['incl_poster'] and kwargs.get('parameters'): # Grab formatted metadata @@ -3965,7 +3963,7 @@ class TELEGRAM(Notifier): {'label': 'Enable HTML Support', 'value': self.config['html_support'], 'name': 'telegram_html_support', - 'description': 'Enable to style your messages using HTML. 
Disable to use Markdown instead.', + 'description': 'Style your messages using these HTML tags: b, i, a[href], code, pre.', 'input_type': 'checkbox' }, {'label': 'Disable Web Page Previews', From 6d092b4984fda97b6e250cc5a9c4c88eb0827339 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 8 Feb 2022 10:29:06 -0800 Subject: [PATCH 025/743] Add additional Telegram HTML tags --- plexpy/notification_handler.py | 16 ++++++++++------ plexpy/notifiers.py | 4 +++- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 9a662e2c..b8a4b902 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -1375,12 +1375,16 @@ def strip_tag(data, agent_id=None): data = bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True) elif agent_id == 13: - # Allow tags b, i, code, pre, a[href] for Telegram - whitelist = {'b': [], - 'i': [], - 'code': [], - 'pre': [], - 'a': ['href']} + # Allow tags for Telegram + # https://core.telegram.org/bots/api#html-style + whitelist = {'b': [], 'strong': [], + 'i': [], 'em': [], + 'u': [], 'ins': [], + 's': [], 'strike': [], 'del': [], + 'span': ['class'], 'tg-spoiler': [], + 'a': ['href'], + 'code': ['class'], + 'pre': []} data = bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True) elif agent_id in (10, 14, 20, 25): diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index 1c8bf0fb..42df413d 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -3963,7 +3963,9 @@ class TELEGRAM(Notifier): {'label': 'Enable HTML Support', 'value': self.config['html_support'], 'name': 'telegram_html_support', - 'description': 'Style your messages using these HTML tags: b, i, a[href], code, pre.', + 'description': 'Enable to style your messages using these HTML tags:
' + 'b, strong, i, em, u, ins, s, strike, del, span[class], ' + 'tg-spoiler, a[href], code[class], pre', 'input_type': 'checkbox' }, {'label': 'Disable Web Page Previews', From b73aa888c29d02b5fecded624a88752e3a3343b6 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 8 Feb 2022 10:35:23 -0800 Subject: [PATCH 026/743] v2.9.2 --- CHANGELOG.md | 9 +++++++++ plexpy/version.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4bd29a91..cc596ffe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## v2.9.2 (2022-02-08) + +* Notification: + * New: Added support for additional Telegram HTML tags. + * Removed: Revert Telegram defaulting to MarkdownV2 and only support HTML. (#1635) +* Other: + * Fix: The Local user being masked in the logs. + + ## v2.9.1 (2022-02-07) * Other: diff --git a/plexpy/version.py b/plexpy/version.py index 5a6de435..06a18a93 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.9.1" +PLEXPY_RELEASE_VERSION = "v2.9.2" From 51d701521ad48834d149efa6cefac28217a02a5c Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Wed, 9 Feb 2022 09:22:13 -0800 Subject: [PATCH 027/743] Fix username logger blacklist preventing Tautulli from starting --- plexpy/logger.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/plexpy/logger.py b/plexpy/logger.py index dd2e581a..cf7fb294 100644 --- a/plexpy/logger.py +++ b/plexpy/logger.py @@ -123,7 +123,12 @@ class UsernameFilter(logging.Filter): if not plexpy.CONFIG.LOG_BLACKLIST_USERNAMES: return True - for item in users.Users().get_users(): + if not plexpy._INITIALIZED: + return True + + items = users.Users().get_users() or [] + + for item in items: username = item['username'] friendly_name = item['friendly_name'] From 3eef2b7f21a83542f36fdbb9065c7ceafc72ee69 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Wed, 9 Feb 2022 09:22:50 -0800 Subject: [PATCH 028/743] Fix missing first run complete flag causing setup wizard loop --- data/interfaces/default/welcome.html | 1 + plexpy/config.py | 1 + 2 files changed, 2 insertions(+) diff --git a/data/interfaces/default/welcome.html b/data/interfaces/default/welcome.html index 8f220da7..a6a6d04f 100644 --- a/data/interfaces/default/welcome.html +++ b/data/interfaces/default/welcome.html @@ -239,6 +239,7 @@ + diff --git a/plexpy/config.py b/plexpy/config.py index c296d628..7c8f95c4 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -307,6 +307,7 @@ CHECKED_SETTINGS = [ 'CACHE_IMAGES', 'CHECK_GITHUB', 'ENABLE_HTTPS', + 'FIRST_RUN_COMPLETE', 'GET_FILE_SIZES', 'GROUP_HISTORY_TABLES', 'HISTORY_TABLE_ACTIVITY', From ee43f001e4810b8c6858d0fb5eca60d8b692f906 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Wed, 9 Feb 2022 09:30:19 -0800 Subject: [PATCH 029/743] v2.9.3 --- CHANGELOG.md | 8 ++++++++ plexpy/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cc596ffe..ad366b9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## v2.9.3 (2022-02-09) + +* UI: + * Fix: Setup wizard looping. +* Other: + * Fix: Logger username masking preventing Tautulli from starting on new installs. 
+ + ## v2.9.2 (2022-02-08) * Notification: diff --git a/plexpy/version.py b/plexpy/version.py index 06a18a93..503793d6 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.9.2" +PLEXPY_RELEASE_VERSION = "v2.9.3" From 40dc6e08efca39a0ad0c4f9a99c5f3df2d204336 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 12 Feb 2022 17:59:14 -0800 Subject: [PATCH 030/743] Fix first_run_complete config flag --- data/interfaces/default/welcome.html | 1 - plexpy/config.py | 1 - plexpy/webserve.py | 4 ++++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/data/interfaces/default/welcome.html b/data/interfaces/default/welcome.html index a6a6d04f..ed44eac3 100644 --- a/data/interfaces/default/welcome.html +++ b/data/interfaces/default/welcome.html @@ -244,7 +244,6 @@ - diff --git a/plexpy/config.py b/plexpy/config.py index 7c8f95c4..c296d628 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -307,7 +307,6 @@ CHECKED_SETTINGS = [ 'CACHE_IMAGES', 'CHECK_GITHUB', 'ENABLE_HTTPS', - 'FIRST_RUN_COMPLETE', 'GET_FILE_SIZES', 'GROUP_HISTORY_TABLES', 'HISTORY_TABLE_ACTIVITY', diff --git a/plexpy/webserve.py b/plexpy/webserve.py index dd6e66a4..24c47361 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -3261,6 +3261,10 @@ class WebInterface(object): all_settings = config.SETTINGS + config.CHECKED_SETTINGS kwargs = {k: v for k, v in kwargs.items() if k.upper() in all_settings} + + if first_run: + kwargs['first_run_complete'] = 1 + plexpy.CONFIG.process_kwargs(kwargs) # Write the config From 636806ae17db835cb6af9c07f72aeb37c85e5ff0 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 12 Feb 2022 18:05:17 -0800 Subject: [PATCH 031/743] Fix Tautulli starting multiple instances on Windows --- plexpy/windows.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plexpy/windows.py b/plexpy/windows.py index 5b9519b3..3f94b9f0 100644 --- a/plexpy/windows.py +++ b/plexpy/windows.py @@ -154,7 +154,7 @@ def set_startup(): else: args = [exe, plexpy.FULL_PATH] + run_args - registry_key_name = '{}_{}'.format(common.PRODUCT, plexpy.CONFIG.PMS_UUID) + registry_key_name = common.PRODUCT cmd = ' '.join(cmd_quote(arg) for arg in args).replace('python.exe', 'pythonw.exe').replace("'", '"') @@ -162,14 +162,14 @@ def set_startup(): # Rename old Tautulli registry key try: registry_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, startup_reg_path, 0, winreg.KEY_ALL_ACCESS) - winreg.QueryValueEx(registry_key, common.PRODUCT) + winreg.QueryValueEx(registry_key, '{}_{}'.format(common.PRODUCT, plexpy.CONFIG.PMS_UUID)) reg_value_exists = True except WindowsError: reg_value_exists = False if reg_value_exists: try: - winreg.DeleteValue(registry_key, common.PRODUCT) + winreg.DeleteValue(registry_key, '{}_{}'.format(common.PRODUCT, plexpy.CONFIG.PMS_UUID)) winreg.CloseKey(registry_key) except WindowsError: pass From b4600c8306e65aa408e248628b0a3d12b0ce0720 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 12 Feb 2022 18:56:08 -0800 Subject: [PATCH 032/743] Bump config version to set first_run_complete flag --- plexpy/config.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/plexpy/config.py b/plexpy/config.py index c296d628..8f08a5b0 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -698,3 +698,9 @@ class Config(object): 
self.HTTP_HASHED_PASSWORD = 1 self.CONFIG_VERSION = 20 + + if self.CONFIG_VERSION == 20: + if self.PMS_UUID: + self.FIRST_RUN_COMPLETE = 1 + + self.CONFIG_VERSION = 21 From cfd1bf445f7f5332b9183d98ea8893f325c5ca92 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 12 Feb 2022 19:17:01 -0800 Subject: [PATCH 033/743] v2.9.4 --- CHANGELOG.md | 8 ++++++++ plexpy/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ad366b9d..0cb03f75 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## v2.9.4 (2022-02-12) + +* UI: + * Fix: Setup wizard appearing when restarting after saving settings. +* Other: + * Fix: Stop Tautulli from starting multiple instances on Windows after a clean reinstall. Check the startup items in Windows Task Manager if it is still occurring. + + ## v2.9.3 (2022-02-09) * UI: diff --git a/plexpy/version.py b/plexpy/version.py index 503793d6..bb6feecc 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.9.3" +PLEXPY_RELEASE_VERSION = "v2.9.4" From 6d84d1b4c94d904de435daa7467f0c43675dcb0c Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 13 Feb 2022 11:59:06 -0800 Subject: [PATCH 034/743] Improve calculation for transcode progress bar --- data/interfaces/default/current_activity_instance.html | 5 +++-- data/interfaces/default/index.html | 6 ++++-- plexpy/pmsconnect.py | 6 ++++++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/data/interfaces/default/current_activity_instance.html b/data/interfaces/default/current_activity_instance.html index e64f2dd7..6cd017e3 100644 --- a/data/interfaces/default/current_activity_instance.html +++ b/data/interfaces/default/current_activity_instance.html @@ -62,7 +62,7 @@ DOCUMENTATION :: END % if session is not None: <% from collections import defaultdict - from plexpy.helpers import cast_to_int, page, short_season + from plexpy.helpers import cast_to_int, get_percent, page, short_season from plexpy.common import VIDEO_RESOLUTION_OVERRIDES, AUDIO_CODEC_OVERRIDES, EXTRA_TYPES import plexpy %> @@ -392,7 +392,8 @@ DOCUMENTATION :: END % if data['live']:
                                        Live
                                    % else:
-                                       ${data['transcode_progress']}%
+                                       <% transcode_progress = get_percent(data['transcode_max_offset_available'] * 1000, data['duration']) or data['transcode_progress'] %>
+                                       ${transcode_progress}%
                                        ${data['progress_percent']}%
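                                        ## transcode_max_offset_available is reported in seconds while
                                        ## data['duration'] is in milliseconds, hence the * 1000 inside the
                                        ## get_percent() call above (an inference from the matching JS math
                                        ## in the index.html hunk below, not an upstream comment).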
% endif diff --git a/data/interfaces/default/index.html b/data/interfaces/default/index.html index 11f9ddf3..652af7c6 100644 --- a/data/interfaces/default/index.html +++ b/data/interfaces/default/index.html @@ -590,8 +590,10 @@ } // Update the progress bars - $('#buffer-bar-' + key).css({width: parseInt(s.transcode_progress) + '%'}).html(s.transcode_progress + '%') - .attr('data-original-title', 'Transcoder Progress ' + s.transcode_progress + '%'); + var duration = parseInt(s.duration); + var transcode_progress = duration ? Math.round(s.transcode_max_offset_available * 1000 / duration * 100) : s.transcode_progress; + $('#buffer-bar-' + key).css({width: parseInt(transcode_progress) + '%'}).html(transcode_progress + '%') + .attr('data-original-title', 'Transcoder Progress ' + transcode_progress + '%'); if (s.live !== 1) { var progress_bar = $('#progress-bar-' + key); progress_bar.data('state', s.state); diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index fc89def6..94b0fdeb 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -1734,6 +1734,8 @@ class PmsConnect(object): transcode_progress = helpers.get_xml_attr(transcode_info, 'progress') transcode_speed = helpers.get_xml_attr(transcode_info, 'speed') + transcode_min_offset = helpers.get_xml_attr(transcode_info, 'minOffsetAvailable') + transcode_max_offset = helpers.get_xml_attr(transcode_info, 'maxOffsetAvailable') transcode_details = {'transcode_key': helpers.get_xml_attr(transcode_info, 'key'), 'transcode_throttled': int(helpers.get_xml_attr(transcode_info, 'throttled') == '1'), @@ -1746,6 +1748,8 @@ class PmsConnect(object): 'transcode_height': helpers.get_xml_attr(transcode_info, 'height'), # Blank but keep backwards compatibility 'transcode_container': helpers.get_xml_attr(transcode_info, 'container'), 'transcode_protocol': helpers.get_xml_attr(transcode_info, 'protocol'), + 'transcode_min_offset_available': int(round(helpers.cast_to_float(transcode_min_offset), 0)), + 'transcode_max_offset_available': int(round(helpers.cast_to_float(transcode_max_offset), 0)), 'transcode_hw_requested': int(helpers.get_xml_attr(transcode_info, 'transcodeHwRequested') == '1'), 'transcode_hw_decode': helpers.get_xml_attr(transcode_info, 'transcodeHwDecoding'), 'transcode_hw_decode_title': helpers.get_xml_attr(transcode_info, 'transcodeHwDecodingTitle'), @@ -1771,6 +1775,8 @@ class PmsConnect(object): 'transcode_height': '', 'transcode_container': '', 'transcode_protocol': '', + 'transcode_min_offset_available': 0, + 'transcode_max_offset_available': 0, 'transcode_hw_requested': 0, 'transcode_hw_decode': '', 'transcode_hw_decode_title': '', From ec363b2ef3f18ec45a66f2c7dd8863179c84ef77 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 13 Feb 2022 12:17:57 -0800 Subject: [PATCH 035/743] Add transcode offset keys to get_activity API docs --- plexpy/webserve.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 24c47361..62069cc9 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -5893,6 +5893,8 @@ class WebInterface(object): "transcode_hw_full_pipeline": 0, "transcode_hw_requested": 0, "transcode_key": "", + "transcode_max_offset_available": 0, + "transcode_min_offset_available": 0, "transcode_progress": 0, "transcode_protocol": "", "transcode_speed": "", From 3c48ea49f375f069bc62b649f481c093a600f5b5 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 18 Feb 2022 23:27:02 -0800 
Subject: [PATCH 036/743] Retrieve season summary and fallback to show summary * Fixes #1657 --- plexpy/pmsconnect.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index 94b0fdeb..2bdd4370 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -870,7 +870,7 @@ class PmsConnect(object): 'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), 'studio': show_details.get('studio', ''), 'content_rating': show_details.get('content_rating', ''), - 'summary': show_details.get('summary', ''), + 'summary': helpers.get_xml_attr(metadata_main, 'summary') or show_details.get('summary', ''), 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), 'rating': helpers.get_xml_attr(metadata_main, 'rating'), 'rating_image': helpers.get_xml_attr(metadata_main, 'ratingImage'), From d657a609ffa3c99b074a7d86399037157469fcca Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 19 Feb 2022 16:37:03 -0800 Subject: [PATCH 037/743] Reschedule tasks after changing backup interval * Fixes #1662 --- plexpy/webserve.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 62069cc9..76a4d600 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -3205,7 +3205,8 @@ class WebInterface(object): kwargs.get('refresh_users_interval') != str(plexpy.CONFIG.REFRESH_USERS_INTERVAL) or \ kwargs.get('pms_update_check_interval') != str(plexpy.CONFIG.PMS_UPDATE_CHECK_INTERVAL) or \ kwargs.get('monitor_pms_updates') != plexpy.CONFIG.MONITOR_PMS_UPDATES or \ - kwargs.get('pms_url_manual') != plexpy.CONFIG.PMS_URL_MANUAL: + kwargs.get('pms_url_manual') != plexpy.CONFIG.PMS_URL_MANUAL or \ + kwargs.get('backup_interval') != str(plexpy.CONFIG.BACKUP_INTERVAL): reschedule = True # If we change the SSL setting for PMS or PMS remote setting, make sure we grab the new url. From 297620f4a903e0363a53aa3b1ccdf8d6a287c875 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 21 Feb 2022 09:42:50 -0800 Subject: [PATCH 038/743] Reorder db migrations --- plexpy/__init__.py | 100 ++++++++++++++++++++++----------------------- 1 file changed, 50 insertions(+), 50 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 151cabe7..7d5fb00d 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -1443,56 +1443,6 @@ def dbcheck(): except sqlite3.OperationalError: logger.warn("Unable to capitalize Windows platform values in session_history table.") - # Upgrade session_history table from earlier versions - try: - c_db.execute('SELECT section_id FROM session_history') - except sqlite3.OperationalError: - logger.debug("Altering database. 
Updating database table session_history.") - c_db.execute( - 'ALTER TABLE session_history ADD COLUMN section_id INTEGER' - ) - c_db.execute( - 'UPDATE session_history SET section_id = (' - 'SELECT section_id FROM session_history_metadata ' - 'WHERE session_history_metadata.id = session_history.id)' - ) - c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history_metadata_temp (id INTEGER PRIMARY KEY, ' - 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, ' - 'media_index INTEGER, parent_media_index INTEGER, ' - 'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, ' - 'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, ' - 'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, ' - 'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, ' - 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)' - ) - c_db.execute( - 'INSERT INTO session_history_metadata_temp (id, rating_key, parent_rating_key, grandparent_rating_key, ' - 'title, parent_title, grandparent_title, original_title, full_title, ' - 'media_index, parent_media_index, ' - 'thumb, parent_thumb, grandparent_thumb, ' - 'art, media_type, year, originally_available_at, added_at, updated_at, ' - 'last_viewed_at, content_rating, summary, tagline, rating, ' - 'duration, guid, directors, writers, actors, genres, studio, ' - 'labels, live, channel_call_sign, channel_identifier, channel_thumb) ' - 'SELECT id, rating_key, parent_rating_key, grandparent_rating_key, ' - 'title, parent_title, grandparent_title, original_title, full_title, ' - 'media_index, parent_media_index, ' - 'thumb, parent_thumb, grandparent_thumb, ' - 'art, media_type, year, originally_available_at, added_at, updated_at, ' - 'last_viewed_at, content_rating, summary, tagline, rating, ' - 'duration, guid, directors, writers, actors, genres, studio, ' - 'labels, live, channel_call_sign, channel_identifier, channel_thumb ' - 'FROM session_history_metadata' - ) - c_db.execute( - 'DROP TABLE session_history_metadata' - ) - c_db.execute( - 'ALTER TABLE session_history_metadata_temp RENAME TO session_history_metadata' - ) - # Upgrade session_history_metadata table from earlier versions try: c_db.execute('SELECT full_title FROM session_history_metadata') @@ -1816,6 +1766,56 @@ def dbcheck(): 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language_code TEXT' ) + # Upgrade session_history table from earlier versions + try: + c_db.execute('SELECT section_id FROM session_history') + except sqlite3.OperationalError: + logger.debug("Altering database. 
Updating database table session_history.") + c_db.execute( + 'ALTER TABLE session_history ADD COLUMN section_id INTEGER' + ) + c_db.execute( + 'UPDATE session_history SET section_id = (' + 'SELECT section_id FROM session_history_metadata ' + 'WHERE session_history_metadata.id = session_history.id)' + ) + c_db.execute( + 'CREATE TABLE IF NOT EXISTS session_history_metadata_temp (id INTEGER PRIMARY KEY, ' + 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' + 'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, ' + 'media_index INTEGER, parent_media_index INTEGER, ' + 'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, ' + 'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, ' + 'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, ' + 'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, ' + 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)' + ) + c_db.execute( + 'INSERT INTO session_history_metadata_temp (id, rating_key, parent_rating_key, grandparent_rating_key, ' + 'title, parent_title, grandparent_title, original_title, full_title, ' + 'media_index, parent_media_index, ' + 'thumb, parent_thumb, grandparent_thumb, ' + 'art, media_type, year, originally_available_at, added_at, updated_at, ' + 'last_viewed_at, content_rating, summary, tagline, rating, ' + 'duration, guid, directors, writers, actors, genres, studio, ' + 'labels, live, channel_call_sign, channel_identifier, channel_thumb) ' + 'SELECT id, rating_key, parent_rating_key, grandparent_rating_key, ' + 'title, parent_title, grandparent_title, original_title, full_title, ' + 'media_index, parent_media_index, ' + 'thumb, parent_thumb, grandparent_thumb, ' + 'art, media_type, year, originally_available_at, added_at, updated_at, ' + 'last_viewed_at, content_rating, summary, tagline, rating, ' + 'duration, guid, directors, writers, actors, genres, studio, ' + 'labels, live, channel_call_sign, channel_identifier, channel_thumb ' + 'FROM session_history_metadata' + ) + c_db.execute( + 'DROP TABLE session_history_metadata' + ) + c_db.execute( + 'ALTER TABLE session_history_metadata_temp RENAME TO session_history_metadata' + ) + # Upgrade users table from earlier versions try: c_db.execute('SELECT do_notify FROM users') From 74cd49416293fa586ab327a97d175cca86a26b4c Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 21 Feb 2022 12:14:14 -0800 Subject: [PATCH 039/743] Pass media_type when retrieving show and artist children for newsletter * Fix different album types not shown on newsletter. 
(#1559) --- plexpy/newsletters.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plexpy/newsletters.py b/plexpy/newsletters.py index d3910942..03513f1e 100644 --- a/plexpy/newsletters.py +++ b/plexpy/newsletters.py @@ -752,7 +752,7 @@ class RecentlyAdded(Newsletter): continue show_metadata = pms_connect.get_metadata_details(show_rating_key, media_info=False) - children = pms_connect.get_item_children(show_rating_key, get_grandchildren=True) + children = pms_connect.get_item_children(show_rating_key, media_type=media_type, get_grandchildren=True) filtered_children = [i for i in children['children_list'] if self.start_time < helpers.cast_to_int(i['added_at']) < self.end_time] filtered_children.sort(key=lambda x: helpers.cast_to_int(x['parent_media_index'])) @@ -802,7 +802,7 @@ class RecentlyAdded(Newsletter): continue artist_metadata = pms_connect.get_metadata_details(artist_rating_key, media_info=False) - children = pms_connect.get_item_children(artist_rating_key) + children = pms_connect.get_item_children(artist_rating_key, media_type=media_type) filtered_children = [i for i in children['children_list'] if self.start_time < helpers.cast_to_int(i['added_at']) < self.end_time] filtered_children.sort(key=lambda x: x['added_at']) From bd9fe54fd7b0dec86499b31975bb7c15eb70d42d Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 24 Feb 2022 19:11:30 -0800 Subject: [PATCH 040/743] Enable dynamic anonymous redirect by default in setup wizard --- data/interfaces/default/welcome.html | 3 ++- plexpy/config.py | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/data/interfaces/default/welcome.html b/data/interfaces/default/welcome.html index ed44eac3..adb77e45 100644 --- a/data/interfaces/default/welcome.html +++ b/data/interfaces/default/welcome.html @@ -239,7 +239,8 @@ - + + diff --git a/plexpy/config.py b/plexpy/config.py index 8f08a5b0..6f36f55e 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -704,3 +704,9 @@ class Config(object): self.FIRST_RUN_COMPLETE = 1 self.CONFIG_VERSION = 21 + + if self.CONFIG_VERSION == 21: + if not self.ANON_REDIRECT_DYNAMIC and not self.ANON_REDIRECT: + self.ANON_REDIRECT_DYNAMIC = 1 + + self.CONFIG_VERSION = 22 From 84a14c0f2587f02460ad78ee8cb180bc94c100a1 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 27 Feb 2022 16:50:29 -0800 Subject: [PATCH 041/743] Colour active filter buttons --- data/interfaces/default/css/tautulli.css | 7 +++++++ data/interfaces/default/graphs.html | 4 ++-- data/interfaces/default/history.html | 14 +++++++------- data/interfaces/default/index.html | 14 +++++++------- data/interfaces/default/info.html | 6 +++--- data/interfaces/default/library.html | 6 +++--- data/interfaces/default/user.html | 14 +++++++------- 7 files changed, 36 insertions(+), 29 deletions(-) diff --git a/data/interfaces/default/css/tautulli.css b/data/interfaces/default/css/tautulli.css index 136644e2..514975c3 100644 --- a/data/interfaces/default/css/tautulli.css +++ b/data/interfaces/default/css/tautulli.css @@ -680,6 +680,13 @@ textarea.form-control:focus { .btn-form:focus { color: #eee; } +.btn-filter.active, +.btn-filter.active.focus { + background-color: #b7800a !important; +} +.btn-filter.active:hover { + background-color: #896007 !important; +} .form-control-feedback { color: #E5A00D; margin: 5px 40px 5px 0; diff --git a/data/interfaces/default/graphs.html b/data/interfaces/default/graphs.html index 
4dea9b6f..2e71bd76 100644 --- a/data/interfaces/default/graphs.html +++ b/data/interfaces/default/graphs.html @@ -21,10 +21,10 @@
-
diff --git a/data/interfaces/default/history.html b/data/interfaces/default/history.html index 01317027..cf15c87f 100644 --- a/data/interfaces/default/history.html +++ b/data/interfaces/default/history.html @@ -39,27 +39,27 @@ % endif
-
-
diff --git a/data/interfaces/default/index.html b/data/interfaces/default/index.html index 652af7c6..47b0d87d 100644 --- a/data/interfaces/default/index.html +++ b/data/interfaces/default/index.html @@ -42,10 +42,10 @@

Watch Statistics

-
@@ -100,19 +100,19 @@
-
diff --git a/data/interfaces/default/info.html b/data/interfaces/default/info.html index 80ae1dfb..0b69d508 100644 --- a/data/interfaces/default/info.html +++ b/data/interfaces/default/info.html @@ -649,13 +649,13 @@ DOCUMENTATION :: END
% endif
-
diff --git a/data/interfaces/default/library.html b/data/interfaces/default/library.html index b1ce22c4..59de6497 100644 --- a/data/interfaces/default/library.html +++ b/data/interfaces/default/library.html @@ -217,13 +217,13 @@ DOCUMENTATION :: END
% endif
-
diff --git a/data/interfaces/default/user.html b/data/interfaces/default/user.html index 36beca13..501c68ca 100644 --- a/data/interfaces/default/user.html +++ b/data/interfaces/default/user.html @@ -167,27 +167,27 @@ DOCUMENTATION :: END % endif
-
-
From 35aca11feb213d59b416f009af139eaee8298d3a Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 27 Feb 2022 17:03:11 -0800 Subject: [PATCH 042/743] Escape characters in username log filter * Remove filter for friendly names --- data/interfaces/default/settings.html | 2 +- plexpy/logger.py | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/data/interfaces/default/settings.html b/data/interfaces/default/settings.html index adda3782..3d6ab932 100644 --- a/data/interfaces/default/settings.html +++ b/data/interfaces/default/settings.html @@ -164,7 +164,7 @@ Mask Usernames in Logs

- Enable to mask Plex usernames and Tautulli friendly names with asterisks (*) in the logs.
+ Enable to mask Plex usernames with asterisks (*) in the logs.
Note: Only logs from the time this setting is enabled will be masked.
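The logger.py diff below swaps the \b-anchored format pattern for re.escape(), so usernames containing regex metacharacters are matched literally. A minimal runnable sketch of the patched replace() helper, with a made-up username showing why the escaping matters:

import re

def mask_username(text, match):
    # Same masking scheme as the patched helper: keep the first two and
    # the last character, hide the rest behind eight asterisks.
    mask = match[:2] + 8 * '*' + match[-1]
    # re.escape() makes '.', '+', etc. in the username match literally;
    # the old r'\b{}\b'.format(match) pattern treated them as regex syntax.
    return re.sub(re.escape(match), mask, text, flags=re.IGNORECASE)

print(mask_username('Session started by john.doe+plex', 'john.doe+plex'))
# Session started by jo********x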

diff --git a/plexpy/logger.py b/plexpy/logger.py index cf7fb294..bda5c4cc 100644 --- a/plexpy/logger.py +++ b/plexpy/logger.py @@ -130,20 +130,17 @@ class UsernameFilter(logging.Filter): for item in items: username = item['username'] - friendly_name = item['friendly_name'] - if username == 'Local': + if username.lower() in ('local', 'guest'): continue try: record.msg = self.replace(record.msg, username) - record.msg = self.replace(record.msg, friendly_name) args = [] for arg in record.args: if isinstance(arg, str): arg = self.replace(arg, username) - arg = self.replace(arg, friendly_name) args.append(arg) record.args = tuple(args) except: @@ -154,7 +151,7 @@ class UsernameFilter(logging.Filter): @staticmethod def replace(text, match): mask = match[:2] + 8 * '*' + match[-1] - return re.sub(r'\b{}\b'.format(match), mask, text, flags=re.IGNORECASE) + return re.sub(re.escape(match), mask, text, flags=re.IGNORECASE) class RegexFilter(logging.Filter): From 50d90d16bfd64e4a88f2cbb475da95ced52bce3e Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 11 Mar 2022 20:18:10 -0800 Subject: [PATCH 043/743] Update App Store link to specify iphone --- data/interfaces/default/settings.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/data/interfaces/default/settings.html b/data/interfaces/default/settings.html index 3d6ab932..a6a95cb0 100644 --- a/data/interfaces/default/settings.html +++ b/data/interfaces/default/settings.html @@ -1582,7 +1582,7 @@ Get the Tautulli Remote app on Google Play or the - App Store + App Store to access Tautulli from your mobile device.

@@ -1591,7 +1591,7 @@ Get it on Google Play - Download on the App Store + Download on the App Store Google Play and the Google Play logo are trademarks of Google LLC. From eb1d724417604bc30d9ead66a1e2df4cc435965d Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Wed, 16 Mar 2022 16:59:58 -0700 Subject: [PATCH 044/743] Add Tautulli to X-Plex-Device-Name header --- data/interfaces/default/js/script.js | 2 +- plexpy/http_handler.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/data/interfaces/default/js/script.js b/data/interfaces/default/js/script.js index 14e12b06..2df5daeb 100644 --- a/data/interfaces/default/js/script.js +++ b/data/interfaces/default/js/script.js @@ -619,7 +619,7 @@ function getPlexHeaders(clientID) { 'X-Plex-Platform-Version': p.version, 'X-Plex-Model': 'Plex OAuth', 'X-Plex-Device': p.os, - 'X-Plex-Device-Name': p.name, + 'X-Plex-Device-Name': p.name + ' (Tautulli)', 'X-Plex-Device-Screen-Resolution': window.screen.width + 'x' + window.screen.height, 'X-Plex-Language': 'en' }; diff --git a/plexpy/http_handler.py b/plexpy/http_handler.py index 6672e050..974b9ce4 100644 --- a/plexpy/http_handler.py +++ b/plexpy/http_handler.py @@ -60,7 +60,8 @@ class HTTPHandler(object): 'X-Plex-Platform-Version': plexpy.common.PLATFORM_RELEASE, 'X-Plex-Device': '{} {}'.format(plexpy.common.PLATFORM, plexpy.common.PLATFORM_RELEASE), - 'X-Plex-Device-Name': plexpy.common.PLATFORM_DEVICE_NAME + 'X-Plex-Device-Name': '{} ({})'.format(plexpy.common.PLATFORM_DEVICE_NAME, + plexpy.common.PRODUCT) } self.token = token From 87389320fbbc059b49635ce85325e23db3b56fa4 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 18 Mar 2022 15:49:44 -0700 Subject: [PATCH 045/743] Fix live tv filter in history query * Fixes #1691 --- plexpy/datafactory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index 82b8a193..c05224a2 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -112,7 +112,7 @@ class DataFactory(object): 'secure', 'relayed', 'session_history.media_type', - '(CASE WHEN session_history_metadata.live = 1 THEN "live" ELSE session_history.media_type END) \ + '(CASE WHEN session_history_metadata.live = 1 THEN \'live\' ELSE session_history.media_type END) \ AS media_type_live', 'session_history_metadata.rating_key', 'session_history_metadata.parent_rating_key', @@ -170,7 +170,7 @@ class DataFactory(object): 'secure', 'relayed', 'media_type', - '(CASE WHEN live = 1 THEN "live" ELSE media_type END) AS media_type_live', + '(CASE WHEN live = 1 THEN \'live\' ELSE media_type END) AS media_type_live', 'rating_key', 'parent_rating_key', 'grandparent_rating_key', From 9c8e8b435161647b70824e13934104880deafdd9 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 26 Mar 2022 14:57:44 -0700 Subject: [PATCH 046/743] Simplify default settings in setup wizard --- data/interfaces/default/welcome.html | 18 ------------------ plexpy/config.py | 10 +--------- plexpy/datafactory.py | 3 --- plexpy/libraries.py | 10 +++------- plexpy/webserve.py | 24 ++++++++++-------------- 5 files changed, 14 insertions(+), 51 deletions(-) diff --git a/data/interfaces/default/welcome.html b/data/interfaces/default/welcome.html index adb77e45..33cd290c 100644 --- a/data/interfaces/default/welcome.html +++ b/data/interfaces/default/welcome.html @@ -229,24 +229,6 @@
- - - - - - - - - - - - - - - - - -
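The webserve.py change later in this patch relies on how HTML forms submit checkboxes: an unchecked box is omitted from the POST body entirely, so its key must be defaulted to 0 server-side (now skipped on first_run). A minimal sketch of that coercion, assuming a plain form dict; the two setting names are borrowed from the CHECKED_SETTINGS list in an earlier patch:

def coerce_checkboxes(form_data, checked_settings=('ENABLE_HTTPS', 'GROUP_HISTORY_TABLES')):
    # Browsers leave unchecked checkboxes out of the submission, so a
    # known checkbox key that is missing means "unchecked" (0).
    for setting in checked_settings:
        key = setting.lower()
        form_data[key] = 1 if key in form_data else 0
    return form_data

print(coerce_checkboxes({'enable_https': 'on'}))
# {'enable_https': 1, 'group_history_tables': 0}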
diff --git a/plexpy/config.py b/plexpy/config.py index 6f36f55e..42a1ce8a 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -116,7 +116,7 @@ _CONFIG_DEFINITIONS = { 'GROUP_HISTORY_TABLES': (int, 'General', 1), 'HISTORY_TABLE_ACTIVITY': (int, 'General', 1), 'HOME_SECTIONS': (list, 'General', ['current_activity', 'watch_stats', 'library_stats', 'recently_added']), - 'HOME_LIBRARY_CARDS': (list, 'General', ['first_run']), + 'HOME_LIBRARY_CARDS': (list, 'General', []), 'HOME_STATS_CARDS': (list, 'General', ['top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music', 'popular_music', 'last_watched', 'top_libraries', 'top_users', 'top_platforms', 'most_concurrent']), 'HOME_REFRESH_INTERVAL': (int, 'General', 10), @@ -601,14 +601,6 @@ class Config(object): self.CONFIG_VERSION = 4 if self.CONFIG_VERSION == 4: - if not len(self.HOME_STATS_CARDS) and 'watch_stats' in self.HOME_SECTIONS: - home_sections = self.HOME_SECTIONS - home_sections.remove('watch_stats') - self.HOME_SECTIONS = home_sections - if not len(self.HOME_LIBRARY_CARDS) and 'library_stats' in self.HOME_SECTIONS: - home_sections = self.HOME_SECTIONS - home_sections.remove('library_stats') - self.HOME_SECTIONS = home_sections self.CONFIG_VERSION = 5 diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index c05224a2..031c125c 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -988,9 +988,6 @@ class DataFactory(object): if session.get_session_shared_libraries(): library_cards = session.get_session_shared_libraries() - if 'first_run_wizard' in library_cards: - return None - library_stats = [] try: diff --git a/plexpy/libraries.py b/plexpy/libraries.py index 985ec0f7..011f8201 100644 --- a/plexpy/libraries.py +++ b/plexpy/libraries.py @@ -99,13 +99,9 @@ def refresh_libraries(): 'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids))) monitor_db.action(query=query, args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids) - if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']: - plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys) - plexpy.CONFIG.write() - else: - new_keys = plexpy.CONFIG.HOME_LIBRARY_CARDS + new_keys - plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys) - plexpy.CONFIG.write() + new_keys = plexpy.CONFIG.HOME_LIBRARY_CARDS + new_keys + plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', new_keys) + plexpy.CONFIG.write() logger.info("Tautulli Libraries :: Libraries list refreshed.") return True diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 76a4d600..63952807 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -3171,14 +3171,16 @@ class WebInterface(object): # First run from the setup wizard if kwargs.pop('first_run', None): first_run = True + server_changed = True - for checked_config in config.CHECKED_SETTINGS: - checked_config = checked_config.lower() - if checked_config not in kwargs: - # checked items should be zero or one. if they were not sent then the item was not checked - kwargs[checked_config] = 0 - else: - kwargs[checked_config] = 1 + if not first_run: + for checked_config in config.CHECKED_SETTINGS: + checked_config = checked_config.lower() + if checked_config not in kwargs: + # checked items should be zero or one. 
if they were not sent then the item was not checked + kwargs[checked_config] = 0 + else: + kwargs[checked_config] = 1 # If http password exists in config, do not overwrite when blank value received if kwargs.get('http_password') == ' ': @@ -3237,9 +3239,6 @@ class WebInterface(object): del kwargs[k] kwargs['home_stats_cards'] = kwargs['home_stats_cards'].split(',') - if kwargs['home_stats_cards'] == ['first_run_wizard']: - kwargs['home_stats_cards'] = plexpy.CONFIG.HOME_STATS_CARDS - # Remove config with 'hlcard-' prefix and change home_library_cards to list if kwargs.get('home_library_cards'): for k in list(kwargs.keys()): @@ -3247,11 +3246,8 @@ class WebInterface(object): del kwargs[k] kwargs['home_library_cards'] = kwargs['home_library_cards'].split(',') - if kwargs['home_library_cards'] == ['first_run_wizard']: - refresh_libraries = True - # If we change the server, make sure we grab the new url and refresh libraries and users lists. - if kwargs.pop('server_changed', None): + if kwargs.pop('server_changed', None) or server_changed: server_changed = True refresh_users = True refresh_libraries = True From 9a4ca69590bacbbc5ec0076e078b9e38aea85e35 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 26 Mar 2022 15:11:20 -0700 Subject: [PATCH 047/743] v2.9.5 --- CHANGELOG.md | 21 +++++++++++++++++++++ plexpy/version.py | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0cb03f75..afffb6f2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## v2.9.5 (2022-03-26) + +* Note: + * Updated Snap packages are currently unavailable due to an upstream issue. +* Activity: + * Change: Improve calculation for transcode progress bar percentage on the activity cards. +* History: + * Fix: Live TV history filter not working. (#1691) +* Newsletter: + * Fix: Newsletter not showing different album types. (#1559) +* UI: + * Fix: Display season summary on the media info page if available with a fallback to show summary. (#1657) + * Change: Colour active filter buttons to improve contrast. (#1663) +* API: + * New: Added transcode offset keys to get_activity command. +* Other: + * Fix: Reschedule backup task after changing backup interval. (#1662) + * Fix: Dynamic anonymous redirect setting not being enabled by default after the setup wizard. + * Fix: Usernames with special characters not being filtered in the logs. 
+ + ## v2.9.4 (2022-02-12) * UI: diff --git a/plexpy/version.py b/plexpy/version.py index bb6feecc..854f96d2 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.9.4" +PLEXPY_RELEASE_VERSION = "v2.9.5" From 5d84587ee26b992bb66ab85d759e0b13a33b203f Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 26 Mar 2022 15:40:28 -0700 Subject: [PATCH 048/743] Fix validating Plex login in setup wizard * Fixes #1697 --- data/interfaces/default/welcome.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/interfaces/default/welcome.html b/data/interfaces/default/welcome.html index 33cd290c..c1e043cc 100644 --- a/data/interfaces/default/welcome.html +++ b/data/interfaces/default/welcome.html @@ -291,7 +291,7 @@ var valid_pms_token = el.val(); var retValue = {}; - if (valid_pms_token === 0) { + if (valid_pms_token !== "1") { retValue.status = false; retValue.msg = "Please authenticate."; $("#pms-token-status").html(' Please authenticate.'); From 944a231e5bda3a7f9978f61f45937a6c7da572b6 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 26 Mar 2022 15:47:17 -0700 Subject: [PATCH 049/743] Change websocket header to a dict --- plexpy/web_socket.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plexpy/web_socket.py b/plexpy/web_socket.py index 5fc034a4..de4bb3cd 100644 --- a/plexpy/web_socket.py +++ b/plexpy/web_socket.py @@ -1,4 +1,4 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # This file is part of Tautulli. # @@ -178,9 +178,9 @@ def run(): # Set authentication token (if one is available) if plexpy.CONFIG.PMS_TOKEN: - header = ["X-Plex-Token: %s" % plexpy.CONFIG.PMS_TOKEN] + header = {"X-Plex-Token": plexpy.CONFIG.PMS_TOKEN} else: - header = [] + header = None global ws_shutdown ws_shutdown = False From 3cd98457c26916bcbc703169d70a19683dd95256 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 26 Mar 2022 15:47:53 -0700 Subject: [PATCH 050/743] Apply PMS_TIMEOUT setting to websocket connection --- plexpy/web_socket.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/plexpy/web_socket.py b/plexpy/web_socket.py index de4bb3cd..b70d4b6e 100644 --- a/plexpy/web_socket.py +++ b/plexpy/web_socket.py @@ -1,4 +1,4 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # This file is part of Tautulli. # @@ -182,6 +182,8 @@ def run(): else: header = None + timeout = plexpy.CONFIG.PMS_TIMEOUT + global ws_shutdown ws_shutdown = False reconnects = 0 @@ -189,7 +191,7 @@ def run(): # Try an open the websocket connection logger.info("Tautulli WebSocket :: Opening %swebsocket." % secure) try: - plexpy.WEBSOCKET = create_connection(uri, header=header, sslopt=sslopt) + plexpy.WEBSOCKET = create_connection(uri, timeout=timeout, header=header, sslopt=sslopt) logger.info("Tautulli WebSocket :: Ready") plexpy.WS_CONNECTED = True except (websocket.WebSocketException, IOError, Exception) as e: @@ -222,7 +224,7 @@ def run(): logger.warn("Tautulli WebSocket :: Reconnection attempt %s." 
% str(reconnects)) try: - plexpy.WEBSOCKET = create_connection(uri, header=header, sslopt=sslopt) + plexpy.WEBSOCKET = create_connection(uri, timeout=timeout, header=header, sslopt=sslopt) logger.info("Tautulli WebSocket :: Ready") plexpy.WS_CONNECTED = True except (websocket.WebSocketException, IOError, Exception) as e: From 5c06accca0fcc4b99963cfac90c763c969b88bea Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 1 Apr 2022 10:19:41 -0700 Subject: [PATCH 051/743] Mask emails and longer usernames first --- plexpy/logger.py | 8 ++++++-- plexpy/users.py | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/plexpy/logger.py b/plexpy/logger.py index bda5c4cc..5a1c0f0f 100644 --- a/plexpy/logger.py +++ b/plexpy/logger.py @@ -126,7 +126,11 @@ class UsernameFilter(logging.Filter): if not plexpy._INITIALIZED: return True - items = users.Users().get_users() or [] + items = sorted( + users.Users().get_users(), + key=lambda x: len(x['username']), + reverse=True + ) for item in items: username = item['username'] @@ -339,11 +343,11 @@ def initLogger(console=False, log_dir=False, verbose=False): logger_plex_websocket.handlers + \ cherrypy.log.error_log.handlers for handler in log_handlers: - handler.addFilter(UsernameFilter()) handler.addFilter(BlacklistFilter()) handler.addFilter(PublicIPFilter()) handler.addFilter(PlexDirectIPFilter()) handler.addFilter(EmailFilter()) + handler.addFilter(UsernameFilter()) handler.addFilter(PlexTokenFilter()) # Install exception hooks diff --git a/plexpy/users.py b/plexpy/users.py index dbb064bf..571b565f 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -668,7 +668,7 @@ class Users(object): result = monitor_db.select(query=query) except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for get_users: %s." 
% e) - return None + return [] users = [] for item in result: From b737ab96b4e6045ac3b418b5b2fe98e6cee416fc Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 1 Apr 2022 21:08:10 -0700 Subject: [PATCH 052/743] Change include summary option for Discord and Slack for all media types --- plexpy/notifiers.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index 42df413d..7c9627f0 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -1084,7 +1084,7 @@ class DISCORD(Notifier): else: attachment['image'] = {'url': poster_url} - if self.config['incl_description'] or pretty_metadata.media_type in ('artist', 'album', 'track'): + if self.config['incl_description']: attachment['description'] = description[:2045] + (description[2045:] and '...') fields = [] @@ -1153,10 +1153,10 @@ class DISCORD(Notifier): 'must be enabled under the 3rd Party APIs settings tab.', 'input_type': 'checkbox' }, - {'label': 'Include Plot Summaries', + {'label': 'Include Summary', 'value': self.config['incl_description'], 'name': 'discord_incl_description', - 'description': 'Include a plot summary for movies and TV shows on the info card.', + 'description': 'Include a summary for the media on the info card.', 'input_type': 'checkbox' }, {'label': 'Include Link to Plex Web', @@ -2298,10 +2298,10 @@ class MICROSOFTTEAMS(Notifier): 'must be enabled under the 3rd Party APIs settings tab.', 'input_type': 'checkbox' }, - {'label': 'Include Plot Summaries', + {'label': 'Include Summary', 'value': self.config['incl_description'], 'name': 'microsoftteams_incl_description', - 'description': 'Include a plot summary for movies and TV shows on the info card.', + 'description': 'Include a summary for the media on the info card.', 'input_type': 'checkbox' }, {'label': 'Include Link to Plex Web', @@ -3548,7 +3548,7 @@ class SLACK(Notifier): else: attachment['image_url'] = poster_url - if self.config['incl_description'] or pretty_metadata.media_type in ('artist', 'album', 'track'): + if self.config['incl_description']: attachment['text'] = description fields = [] @@ -3616,10 +3616,10 @@ class SLACK(Notifier): 'must be enabled under the 3rd Party APIs settings tab.', 'input_type': 'checkbox' }, - {'label': 'Include Plot Summaries', + {'label': 'Include Summary', 'value': self.config['incl_description'], 'name': 'slack_incl_description', - 'description': 'Include a plot summary for movies and TV shows on the info card.', + 'description': 'Include a summary for the media on the info card.', 'input_type': 'checkbox' }, {'label': 'Include Link to Plex Web', From 73b94782f9ca939cd0c7daa60fd6233808615250 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 1 Apr 2022 21:31:06 -0700 Subject: [PATCH 053/743] Add hidden username and email columns to users table --- data/interfaces/default/js/tables/users.js | 36 ++++++++++++++++++---- data/interfaces/default/users.html | 2 ++ plexpy/users.py | 2 ++ plexpy/webserve.py | 2 ++ 4 files changed, 36 insertions(+), 6 deletions(-) diff --git a/data/interfaces/default/js/tables/users.js b/data/interfaces/default/js/tables/users.js index 82caf116..67230361 100644 --- a/data/interfaces/default/js/tables/users.js +++ b/data/interfaces/default/js/tables/users.js @@ -90,6 +90,30 @@ users_list_table_options = { }, { "targets": [3], + "data": "username", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null 
&& cellData !== '') { + $(td).html(cellData); + } + }, + "visible": false, + "width": "10%", + "className": "no-wrap" + }, + { + "targets": [4], + "data": "email", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "visible": false, + "width": "10%", + "className": "no-wrap" + }, + { + "targets": [5], "data": "last_seen", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -103,7 +127,7 @@ users_list_table_options = { "className": "no-wrap" }, { - "targets": [4], + "targets": [6], "data": "ip_address", "createdCell": function (td, cellData, rowData, row, col) { if (cellData) { @@ -121,7 +145,7 @@ users_list_table_options = { "className": "no-wrap modal-control-ip" }, { - "targets": [5], + "targets": [7], "data": "platform", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -134,7 +158,7 @@ users_list_table_options = { "className": "no-wrap modal-control" }, { - "targets": [6], + "targets": [8], "data":"player", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -155,7 +179,7 @@ users_list_table_options = { "className": "no-wrap modal-control" }, { - "targets": [7], + "targets": [9], "data":"last_played", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -196,7 +220,7 @@ users_list_table_options = { "className": "datatable-wrap" }, { - "targets": [8], + "targets": [10], "data": "plays", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -208,7 +232,7 @@ users_list_table_options = { "className": "no-wrap" }, { - "targets": [9], + "targets": [11], "data": "duration", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { diff --git a/data/interfaces/default/users.html b/data/interfaces/default/users.html index 495a8056..37a0faa0 100644 --- a/data/interfaces/default/users.html +++ b/data/interfaces/default/users.html @@ -33,6 +33,8 @@ Edit User + Username + Email Last Streamed Last Known IP Last Platform diff --git a/plexpy/users.py b/plexpy/users.py index 571b565f..c79b62d5 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -127,6 +127,7 @@ class Users(object): 'users.username', '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ THEN users.username ELSE users.friendly_name END) AS friendly_name', + 'users.email', 'users.thumb AS user_thumb', 'users.custom_avatar_url AS custom_thumb', 'COUNT(DISTINCT %s) AS plays' % group_by, @@ -202,6 +203,7 @@ class Users(object): 'user_id': item['user_id'], 'username': item['username'], 'friendly_name': item['friendly_name'], + 'email': item['email'], 'user_thumb': user_thumb, 'plays': item['plays'], 'duration': item['duration'], diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 63952807..a7358ec7 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -1305,6 +1305,8 @@ class WebInterface(object): # TODO: Find some one way to automatically get the columns dt_columns = [("user_thumb", False, False), ("friendly_name", True, True), + ("username", True, True), + ("email", True, True), ("last_seen", True, False), ("ip_address", True, True), ("platform", True, True), From e996c4b375c00e797d3608c10ce59be1eb846757 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: 
Mon, 4 Apr 2022 21:06:25 -0700 Subject: [PATCH 054/743] Change default update check interval to hours instead of minutes --- plexpy/config.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/plexpy/config.py b/plexpy/config.py index 42a1ce8a..724e1a08 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -91,7 +91,7 @@ _CONFIG_DEFINITIONS = { 'CACHE_SIZEMB': (int, 'Advanced', 32), 'CHECK_DOCKER_MOUNT': (int, 'Advanced', 1), 'CHECK_GITHUB': (int, 'General', 1), - 'CHECK_GITHUB_INTERVAL': (int, 'General', 360), + 'CHECK_GITHUB_INTERVAL': (int, 'General', 6), 'CHECK_GITHUB_ON_STARTUP': (int, 'General', 1), 'CHECK_GITHUB_CACHE_SECONDS': (int, 'Advanced', 3600), 'CLEANUP_FILES': (int, 'General', 0), @@ -676,10 +676,11 @@ class Config(object): self.CONFIG_VERSION = 18 if self.CONFIG_VERSION == 18: - self.CHECK_GITHUB_INTERVAL = ( - int(self.CHECK_GITHUB_INTERVAL // 60) - + (self.CHECK_GITHUB_INTERVAL % 60 > 0) - ) + if self.CHECK_GITHUB_INTERVAL > 24: + self.CHECK_GITHUB_INTERVAL = ( + int(self.CHECK_GITHUB_INTERVAL // 60) + + (self.CHECK_GITHUB_INTERVAL % 60 > 0) + ) self.CONFIG_VERSION = 19 From aa6592eec7c28f7cbd27545d6f098a8445accbd8 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 8 Apr 2022 20:00:48 -0700 Subject: [PATCH 055/743] Fix importing Plex usernames * Fixes #1710 * Also import the user title (Full Name) --- plexpy/__init__.py | 14 ++++++++++++-- plexpy/plextv.py | 4 +++- plexpy/users.py | 4 ++++ 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 7d5fb00d..a7313b49 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -714,8 +714,9 @@ def dbcheck(): c_db.execute( 'CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, ' 'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, ' - 'thumb TEXT, custom_avatar_url TEXT, email TEXT, is_active INTEGER DEFAULT 1, is_admin INTEGER DEFAULT 0, ' - 'is_home_user INTEGER DEFAULT NULL, is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, ' + 'thumb TEXT, custom_avatar_url TEXT, title TEXT, email TEXT, ' + 'is_active INTEGER DEFAULT 1, is_admin INTEGER DEFAULT 0, is_home_user INTEGER DEFAULT NULL, ' + 'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, ' 'do_notify INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0, ' 'allow_guest INTEGER DEFAULT 0, user_token TEXT, server_token TEXT, shared_libraries TEXT, ' 'filter_all TEXT, filter_movies TEXT, filter_tv TEXT, filter_music TEXT, filter_photos TEXT)' @@ -1909,6 +1910,15 @@ def dbcheck(): 'ALTER TABLE users ADD COLUMN is_active INTEGER DEFAULT 1' ) + # Upgrade users table from earlier versions + try: + c_db.execute('SELECT title FROM users') + except sqlite3.OperationalError: + logger.debug("Altering database. 
Updating database table users.") + c_db.execute( + 'ALTER TABLE users ADD COLUMN title TEXT' + ) + # Upgrade notify_log table from earlier versions try: c_db.execute('SELECT poster_url FROM notify_log') diff --git a/plexpy/plextv.py b/plexpy/plextv.py index d95f8d16..1cb53dd5 100644 --- a/plexpy/plextv.py +++ b/plexpy/plextv.py @@ -357,6 +357,7 @@ class PlexTV(object): for a in xml_head: own_details = {"user_id": helpers.get_xml_attr(a, 'id'), "username": helpers.get_xml_attr(a, 'username'), + "title": helpers.get_xml_attr(a, 'title'), "thumb": helpers.get_xml_attr(a, 'thumb'), "email": helpers.get_xml_attr(a, 'email'), "is_active": 1, @@ -384,7 +385,8 @@ class PlexTV(object): for a in xml_head: friend = {"user_id": helpers.get_xml_attr(a, 'id'), - "username": helpers.get_xml_attr(a, 'title'), + "username": helpers.get_xml_attr(a, 'username'), + "title": helpers.get_xml_attr(a, 'title'), "thumb": helpers.get_xml_attr(a, 'thumb'), "email": helpers.get_xml_attr(a, 'email'), "is_active": 1, diff --git a/plexpy/users.py b/plexpy/users.py index c79b62d5..81276e08 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -84,6 +84,10 @@ def refresh_users(): else: item['custom_avatar_url'] = item['thumb'] + # Check if title is the same as the username + if item['title'] == item['username']: + item['title'] = None + monitor_db.upsert('users', key_dict=keys_dict, value_dict=item) query = 'UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})'.format(', '.join(['?'] * len(user_ids))) From 4862aee9a111c843138472c8b47cd5edb133b1ff Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 8 Apr 2022 20:02:53 -0700 Subject: [PATCH 056/743] Add hidden user title (Full Name) column to users table --- data/interfaces/default/js/tables/users.js | 26 ++++++++++++++++------ data/interfaces/default/users.html | 1 + plexpy/users.py | 2 ++ plexpy/webserve.py | 4 ++++ 4 files changed, 26 insertions(+), 7 deletions(-) diff --git a/data/interfaces/default/js/tables/users.js b/data/interfaces/default/js/tables/users.js index 67230361..1b0793c8 100644 --- a/data/interfaces/default/js/tables/users.js +++ b/data/interfaces/default/js/tables/users.js @@ -102,7 +102,7 @@ users_list_table_options = { }, { "targets": [4], - "data": "email", + "data": "title", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { $(td).html(cellData); @@ -114,6 +114,18 @@ users_list_table_options = { }, { "targets": [5], + "data": "email", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "visible": false, + "width": "10%", + "className": "no-wrap" + }, + { + "targets": [6], "data": "last_seen", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -127,7 +139,7 @@ users_list_table_options = { "className": "no-wrap" }, { - "targets": [6], + "targets": [7], "data": "ip_address", "createdCell": function (td, cellData, rowData, row, col) { if (cellData) { @@ -145,7 +157,7 @@ users_list_table_options = { "className": "no-wrap modal-control-ip" }, { - "targets": [7], + "targets": [8], "data": "platform", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -158,7 +170,7 @@ users_list_table_options = { "className": "no-wrap modal-control" }, { - "targets": [8], + "targets": [9], "data":"player", "createdCell": function (td, cellData, rowData, row, col) { if 
(cellData !== null && cellData !== '') { @@ -179,7 +191,7 @@ users_list_table_options = { "className": "no-wrap modal-control" }, { - "targets": [9], + "targets": [10], "data":"last_played", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -220,7 +232,7 @@ users_list_table_options = { "className": "datatable-wrap" }, { - "targets": [10], + "targets": [11], "data": "plays", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { @@ -232,7 +244,7 @@ users_list_table_options = { "className": "no-wrap" }, { - "targets": [11], + "targets": [12], "data": "duration", "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== null && cellData !== '') { diff --git a/data/interfaces/default/users.html b/data/interfaces/default/users.html index 37a0faa0..d7dd84e8 100644 --- a/data/interfaces/default/users.html +++ b/data/interfaces/default/users.html @@ -34,6 +34,7 @@ User Username + Full Name Email Last Streamed Last Known IP diff --git a/plexpy/users.py b/plexpy/users.py index 81276e08..f434ae6f 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -131,6 +131,7 @@ class Users(object): 'users.username', '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ THEN users.username ELSE users.friendly_name END) AS friendly_name', + 'users.title', 'users.email', 'users.thumb AS user_thumb', 'users.custom_avatar_url AS custom_thumb', @@ -207,6 +208,7 @@ class Users(object): 'user_id': item['user_id'], 'username': item['username'], 'friendly_name': item['friendly_name'], + 'title': item['title'], 'email': item['email'], 'user_thumb': user_thumb, 'plays': item['plays'], diff --git a/plexpy/webserve.py b/plexpy/webserve.py index a7358ec7..6c8a1ad6 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -1268,6 +1268,9 @@ class WebInterface(object): "do_notify": "Checked", "duration": 2998290, "friendly_name": "Jon Snow", + "username": "LordCommanderSnow", + "title": "Jon Snow", + "email": "Jon.Snow.1337@CastleBlack.com", "guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en", "history_row_id": 1121, "ip_address": "xxx.xxx.xxx.xxx", @@ -1306,6 +1309,7 @@ class WebInterface(object): dt_columns = [("user_thumb", False, False), ("friendly_name", True, True), ("username", True, True), + ("title", True, True), ("email", True, True), ("last_seen", True, False), ("ip_address", True, True), From a2fac2b11c9a372c1a09932b04556fca2301592d Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 9 Apr 2022 17:52:17 -0700 Subject: [PATCH 057/743] Improve display of dynamic range on activity cards * Ref #1654. Thanks @herby2212. --- .../default/current_activity_instance.html | 7 ++- data/interfaces/default/index.html | 4 +- plexpy/helpers.py | 2 +- plexpy/pmsconnect.py | 57 +++++++++++++------ plexpy/webserve.py | 1 + 5 files changed, 49 insertions(+), 22 deletions(-) diff --git a/data/interfaces/default/current_activity_instance.html b/data/interfaces/default/current_activity_instance.html index 6cd017e3..e9838b8a 100644 --- a/data/interfaces/default/current_activity_instance.html +++ b/data/interfaces/default/current_activity_instance.html @@ -231,11 +231,14 @@ DOCUMENTATION :: END
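The display rule this patch implements is easier to see in plain Python. This is an illustrative sketch, not code from the patch (the function name is invented): the source dynamic range is labelled only when it is not SDR, and the stream side is labelled when it is not SDR or whenever the source side is, so an HDR10 source tone-mapped to SDR shows both labels.

    # Illustrative sketch, mirroring the Mako/JS logic in the hunks below:
    def dynamic_range_labels(video_dr, stream_dr):
        v = ' ' + video_dr if video_dr != 'SDR' else ''
        sv = ' ' + stream_dr if stream_dr != 'SDR' or v else ''
        return v, sv

    dynamic_range_labels('HDR10', 'SDR')  # (' HDR10', ' SDR')
    dynamic_range_labels('SDR', 'SDR')    # ('', '')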
% if data['media_type'] in ('movie', 'episode', 'clip') and data['stream_video_decision']: <% - if data['video_dynamic_range'] == 'HDR': + if data['video_dynamic_range'] != 'SDR': video_dynamic_range = ' ' + data['video_dynamic_range'] + else: + video_dynamic_range = '' + if data['stream_video_dynamic_range'] != 'SDR' or video_dynamic_range: stream_video_dynamic_range = ' ' + data['stream_video_dynamic_range'] else: - video_dynamic_range = stream_video_dynamic_range = '' + stream_video_dynamic_range = '' %> % if data['stream_video_decision'] == 'transcode': <% diff --git a/data/interfaces/default/index.html b/data/interfaces/default/index.html index 47b0d87d..715996c1 100644 --- a/data/interfaces/default/index.html +++ b/data/interfaces/default/index.html @@ -483,8 +483,8 @@ var video_decision = ''; if (['movie', 'episode', 'clip'].indexOf(s.media_type) > -1 && s.stream_video_decision) { - var v_bd = (s.video_dynamic_range === 'HDR') ? ' ' + s.video_dynamic_range : ''; - var sv_bd = (s.video_dynamic_range === 'HDR') ? ' ' + s.stream_video_dynamic_range : ''; + var v_bd = (s.video_dynamic_range !== 'SDR') ? ' ' + s.video_dynamic_range : ''; + var sv_bd = (s.stream_video_dynamic_range !== 'SDR' || v_bd) ? ' ' + s.stream_video_dynamic_range : ''; var v_res= ''; switch (s.video_resolution.toLowerCase()) { case 'sd': diff --git a/plexpy/helpers.py b/plexpy/helpers.py index fb4afb80..d5ef887f 100644 --- a/plexpy/helpers.py +++ b/plexpy/helpers.py @@ -1545,7 +1545,7 @@ def is_hdr(bit_depth, color_space): def version_to_tuple(version): - return tuple(cast_to_int(v) for v in version.strip('v').split('.')) + return tuple(cast_to_int(v) for v in version.strip('v').replace('-', '.').split('.')) # https://stackoverflow.com/a/1855118 diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index 2bdd4370..a993f6af 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -1401,6 +1401,7 @@ class PmsConnect(object): 'video_color_range': helpers.get_xml_attr(stream, 'colorRange'), 'video_color_space': helpers.get_xml_attr(stream, 'colorSpace'), 'video_color_trc': helpers.get_xml_attr(stream, 'colorTrc'), + 'video_dynamic_range': self.get_dynamic_range(stream), 'video_frame_rate': helpers.get_xml_attr(stream, 'frameRate'), 'video_ref_frames': helpers.get_xml_attr(stream, 'refFrames'), 'video_height': helpers.get_xml_attr(stream, 'height'), @@ -1857,6 +1858,7 @@ class PmsConnect(object): 'stream_video_color_space': helpers.get_xml_attr(video_stream_info, 'colorSpace'), 'stream_video_color_trc': helpers.get_xml_attr(video_stream_info, 'colorTrc'), 'stream_video_codec_level': helpers.get_xml_attr(video_stream_info, 'level'), + 'stream_video_dynamic_range': self.get_dynamic_range(video_stream_info), 'stream_video_ref_frames': helpers.get_xml_attr(video_stream_info, 'refFrames'), 'stream_video_language': helpers.get_xml_attr(video_stream_info, 'language'), 'stream_video_language_code': helpers.get_xml_attr(video_stream_info, 'languageCode'), @@ -1872,6 +1874,7 @@ class PmsConnect(object): 'stream_video_color_space': '', 'stream_video_color_trc': '', 'stream_video_codec_level': '', + 'stream_video_dynamic_range': '', 'stream_video_ref_frames': '', 'stream_video_language': '', 'stream_video_language_code': '', @@ -2064,6 +2067,7 @@ class PmsConnect(object): 'video_color_range': '', 'video_color_space': '', 'video_color_trc': '', + 'video_dynamic_range': '', 'video_frame_rate': '', 'video_ref_frames': '', 'video_height': '', @@ -2152,23 +2156,6 @@ class PmsConnect(object): 
stream_details['stream_video_resolution'], stream_details['stream_video_resolution'] + (video_details['stream_video_scan_type'][:1] or 'p')) - if helpers.cast_to_int(source_video_details.get('video_bit_depth')) > 8 \ - and source_video_details.get('video_color_space') == 'bt2020nc': - stream_details['video_dynamic_range'] = 'HDR' - else: - stream_details['video_dynamic_range'] = 'SDR' - - if stream_details['video_dynamic_range'] == 'HDR' \ - and video_details['stream_video_decision'] != 'transcode' \ - or helpers.cast_to_int(video_details['stream_video_bit_depth']) > 8 \ - and video_details['stream_video_color_space'] == 'bt2020nc': - stream_details['stream_video_dynamic_range'] = 'HDR' - else: - stream_details['stream_video_dynamic_range'] = 'SDR' - else: - stream_details['video_dynamic_range'] = '' - stream_details['stream_video_dynamic_range'] = '' - # Get the quality profile if media_type in ('movie', 'episode', 'clip') and 'stream_bitrate' in stream_details: if sync_id: @@ -3182,3 +3169,39 @@ class PmsConnect(object): return 'public' return plexpy.CONFIG.PMS_UPDATE_CHANNEL + + @staticmethod + def get_dynamic_range(stream): + extended_display_title = helpers.get_xml_attr(stream, 'extendedDisplayTitle') + bit_depth = helpers.cast_to_int(helpers.get_xml_attr(stream, 'bitDepth')) + color_space = helpers.get_xml_attr(stream, 'colorSpace') + DOVI_profile = helpers.get_xml_attr(stream, 'DOVIProfile') + + HDR = bool(bit_depth > 8 and 'bt2020' in color_space) + DV = bool(DOVI_profile) + + if not HDR and not DV: + return 'SDR' + + video_dynamic_range = [] + + # HDR details got introduced with PMS version 1.25.6.5545 + if helpers.version_to_tuple(plexpy.CONFIG.PMS_VERSION) >= helpers.version_to_tuple('1.25.6.5545'): + if 'Dolby Vision' in extended_display_title: + video_dynamic_range.append('Dolby Vision') + if 'HLG' in extended_display_title: + video_dynamic_range.append('HLG') + if 'HDR10' in extended_display_title: + video_dynamic_range.append('HDR10') + elif 'HDR' in extended_display_title: + video_dynamic_range.append('HDR') + else: + if DV: + video_dynamic_range.append('Dolby Vision') + elif HDR: + # Exact HDR version needs PMS version 1.25.6.5545 or newer + video_dynamic_range.append('HDR') + + if not video_dynamic_range: + return 'SDR' + return '/'.join(video_dynamic_range) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 6c8a1ad6..ecf42b47 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -5349,6 +5349,7 @@ class WebInterface(object): "video_color_range": "tv", "video_color_space": "bt709", "video_color_trc": "", + "video_dynamic_range": "SDR", "video_frame_rate": "23.976", "video_height": "1078", "video_language": "", From 1ce292a0b6d3dbb8abfc15856abc722962611ac8 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 10 Apr 2022 15:40:57 -0700 Subject: [PATCH 058/743] Only build snap on amd64, arm64, armhf --- snap/snapcraft.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml index 31dcf29c..5cdeeeb9 100644 --- a/snap/snapcraft.yaml +++ b/snap/snapcraft.yaml @@ -10,6 +10,11 @@ description: > base: core18 confinement: strict +architectures: + - build-on: amd64 + - build-on: arm64 + - build-on: armhf + parts: tautulli: plugin: dump From dd389b0f943d1e2bdb586cc440bc6b5f1e657b54 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 10 Apr 2022 15:48:32 -0700 Subject: [PATCH 059/743] v2.9.6 --- CHANGELOG.md | 14 ++++++++++++++ 
plexpy/version.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index afffb6f2..4c8ba49a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## v2.9.6 (2022-04-10) + +* Activity: + * New: Improved display of dynamic range on the activity cards. (Thanks @herby2212) +* Notifications: + * Change: Make include summary option apply to all media types for Discord and Slack notifications. +* UI: + * Fix: Validating Plex login in the setup wizard. (#1697) + * New: Added hidden username, email, and full name columns to users table. +* Other: + * Fix: Apply pms_timeout setting to websocket connection. + * Fix: Importing of Plex username instead of the full name. (#1710) + + ## v2.9.5 (2022-03-26) * Note: diff --git a/plexpy/version.py b/plexpy/version.py index 854f96d2..88024d9c 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.9.5" +PLEXPY_RELEASE_VERSION = "v2.9.6" From a9860fe62a20adf29bce5440cd9eb73b9d0fadc1 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 11 Apr 2022 15:10:53 -0700 Subject: [PATCH 060/743] Fix managed users no username --- plexpy/users.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plexpy/users.py b/plexpy/users.py index f434ae6f..b7f467e2 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -88,6 +88,10 @@ def refresh_users(): if item['title'] == item['username']: item['title'] = None + # Check if username is blank (Managed Users) + if not item['username']: + item['username'] = item['title'] + monitor_db.upsert('users', key_dict=keys_dict, value_dict=item) query = 'UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})'.format(', '.join(['?'] * len(user_ids))) From b0950abdd9baf8cfa4edd7549e85401e48529fc1 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 11 Apr 2022 15:26:25 -0700 Subject: [PATCH 061/743] v2.9.7 --- CHANGELOG.md | 6 ++++++ plexpy/version.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4c8ba49a..9c8c3c69 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## v2.9.7 (2022-04-11) + +* UI: + * Fix: Managed user missing the username in the Users table. 
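The managed-user fix in PATCH 060 above pairs with the username/title import from PATCH 055: refresh_users drops the title when it merely duplicates the username, then backfills a blank username (managed users) from the title. A minimal standalone sketch of those two checks, with the helper name invented for illustration:

    def normalize_user(item):
        # Drop the title when it duplicates the username.
        if item['title'] == item['username']:
            item['title'] = None
        # Managed users have no username, so fall back to the title.
        if not item['username']:
            item['username'] = item['title']
        return item

    normalize_user({'username': '', 'title': 'Kid'})       # username becomes 'Kid'
    normalize_user({'username': 'snow', 'title': 'snow'})  # title becomes None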
+ + ## v2.9.6 (2022-04-10) * Activity: diff --git a/plexpy/version.py b/plexpy/version.py index 88024d9c..db1a06eb 100644 --- a/plexpy/version.py +++ b/plexpy/version.py @@ -18,4 +18,4 @@ from __future__ import unicode_literals PLEXPY_BRANCH = "master" -PLEXPY_RELEASE_VERSION = "v2.9.6" +PLEXPY_RELEASE_VERSION = "v2.9.7" From d4002b2e2ba0b91675619b608f05d8f782784b00 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 12 Apr 2022 12:43:12 -0700 Subject: [PATCH 062/743] Change values to int instead of "Checked" in users and libraries table * get_users_table: allow_guest, do_notify, keep_history * get_libraries_table: do_notify, do_notify_created, keep_history --- data/interfaces/default/js/tables/libraries.js | 2 +- data/interfaces/default/js/tables/users.js | 4 ++-- plexpy/libraries.py | 6 +++--- plexpy/users.py | 6 +++--- plexpy/webserve.py | 12 ++++++------ 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/data/interfaces/default/js/tables/libraries.js b/data/interfaces/default/js/tables/libraries.js index a7e703a2..b3e702a0 100644 --- a/data/interfaces/default/js/tables/libraries.js +++ b/data/interfaces/default/js/tables/libraries.js @@ -33,7 +33,7 @@ libraries_list_table_options = { $(td).html('
' + ' ' + '   ' + - ' ' + + ' ' + '
'); }, "width": "7%", diff --git a/data/interfaces/default/js/tables/users.js b/data/interfaces/default/js/tables/users.js index 1b0793c8..524ee09a 100644 --- a/data/interfaces/default/js/tables/users.js +++ b/data/interfaces/default/js/tables/users.js @@ -50,8 +50,8 @@ users_list_table_options = { $(td).html('
' + ' ' + '   ' + - ' ' + - ' ' + + ' ' + + ' ' + '
'); }, "width": "7%", diff --git a/plexpy/libraries.py b/plexpy/libraries.py index 011f8201..0b0f16bb 100644 --- a/plexpy/libraries.py +++ b/plexpy/libraries.py @@ -432,9 +432,9 @@ class Libraries(object): 'live': item['live'], 'originally_available_at': item['originally_available_at'], 'guid': item['guid'], - 'do_notify': helpers.checked(item['do_notify']), - 'do_notify_created': helpers.checked(item['do_notify_created']), - 'keep_history': helpers.checked(item['keep_history']), + 'do_notify': item['do_notify'], + 'do_notify_created': item['do_notify_created'], + 'keep_history': item['keep_history'], 'is_active': item['is_active'] } diff --git a/plexpy/users.py b/plexpy/users.py index b7f467e2..a0095573 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -234,9 +234,9 @@ class Users(object): 'originally_available_at': item['originally_available_at'], 'guid': item['guid'], 'transcode_decision': item['transcode_decision'], - 'do_notify': helpers.checked(item['do_notify']), - 'keep_history': helpers.checked(item['keep_history']), - 'allow_guest': helpers.checked(item['allow_guest']), + 'do_notify': item['do_notify'], + 'keep_history': item['keep_history'], + 'allow_guest': item['allow_guest'], 'is_active': item['is_active'] } diff --git a/plexpy/webserve.py b/plexpy/webserve.py index ecf42b47..7e386af4 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -495,13 +495,13 @@ class WebInterface(object): [{"child_count": 3745, "content_rating": "TV-MA", "count": 62, - "do_notify": "Checked", - "do_notify_created": "Checked", + "do_notify": 1, + "do_notify_created": 1, "duration": 1578037, "guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en", "histroy_row_id": 1128, "is_active": 1, - "keep_history": "Checked", + "keep_history": 1, "labels": [], "last_accessed": 1462693216, "last_played": "Game of Thrones - The Red Woman", @@ -1264,8 +1264,8 @@ class WebInterface(object): "recordsTotal": 10, "recordsFiltered": 10, "data": - [{"allow_guest": "Checked", - "do_notify": "Checked", + [{"allow_guest": 1, + "do_notify": 1, "duration": 2998290, "friendly_name": "Jon Snow", "username": "LordCommanderSnow", @@ -1275,7 +1275,7 @@ class WebInterface(object): "history_row_id": 1121, "ip_address": "xxx.xxx.xxx.xxx", "is_active": 1, - "keep_history": "Checked", + "keep_history": 1, "last_played": "Game of Thrones - The Red Woman", "last_seen": 1462591869, "live": 0, From ab24116b1b6b3e32979ad1ab9c08f3c21c47e1c7 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 12 Apr 2022 13:28:13 -0700 Subject: [PATCH 063/743] Fix get_users_table API docs --- plexpy/webserve.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 7e386af4..925bf49b 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -1267,10 +1267,8 @@ class WebInterface(object): [{"allow_guest": 1, "do_notify": 1, "duration": 2998290, - "friendly_name": "Jon Snow", - "username": "LordCommanderSnow", - "title": "Jon Snow", "email": "Jon.Snow.1337@CastleBlack.com", + "friendly_name": "Jon Snow", "guid": "com.plexapp.agents.thetvdb://121361/6/1?lang=en", "history_row_id": 1121, "ip_address": "xxx.xxx.xxx.xxx", @@ -1290,6 +1288,7 @@ class WebInterface(object): "rating_key": 153037, "row_id": 1, "thumb": "/library/metadata/153036/thumb/1462175062", + "title": "Jon Snow", "transcode_decision": "transcode", "user_id": 133788, "user_thumb": "https://plex.tv/users/568gwwoib5t98a3a/avatar", From addf9ea3246a518ea416cfcd666326c6da0b8958 
Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 12 Apr 2022 18:38:36 -0700 Subject: [PATCH 064/743] Update jaraco.classes==3.2.1 [skip ci] --- lib/jaraco/classes/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 lib/jaraco/classes/__init__.py diff --git a/lib/jaraco/classes/__init__.py b/lib/jaraco/classes/__init__.py new file mode 100644 index 00000000..e69de29b From 3e8ef3c1b40f6d996dc5f051c81d8c8a4f9346f5 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 12 Apr 2022 18:38:55 -0700 Subject: [PATCH 065/743] Update jaraco.collections==3.5.1 [skip ci] --- lib/jaraco/collections.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/lib/jaraco/collections.py b/lib/jaraco/collections.py index cfbb7c0a..8323db78 100644 --- a/lib/jaraco/collections.py +++ b/lib/jaraco/collections.py @@ -573,12 +573,16 @@ class DictStack(list, collections.abc.Mapping): 2 >>> stack['c'] 2 + >>> len(stack) + 3 >>> stack.push(dict(a=3)) >>> stack['a'] 3 >>> set(stack.keys()) == set(['a', 'b', 'c']) True - >>> dict(**stack) == dict(a=3, c=2, b=2) + >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) + True + >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) True >>> d = stack.pop() >>> stack['a'] @@ -587,19 +591,28 @@ class DictStack(list, collections.abc.Mapping): >>> stack['a'] 1 >>> stack.get('b', None) + >>> 'c' in stack + True """ - def keys(self): - return list(set(itertools.chain.from_iterable(c.keys() for c in self))) + def __iter__(self): + dicts = list.__iter__(self) + return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) def __getitem__(self, key): - for scope in reversed(self): + for scope in reversed(tuple(list.__iter__(self))): if key in scope: return scope[key] raise KeyError(key) push = list.append + def __contains__(self, other): + return collections.abc.Mapping.__contains__(self, other) + + def __len__(self): + return len(list(iter(self))) + class BijectiveMap(dict): """ From d5afac41045677b29a028d05d51e1d430deca3d9 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 12 Apr 2022 18:39:16 -0700 Subject: [PATCH 066/743] Add jaraco.context==4.1.1 [skip ci] --- lib/jaraco/context.py | 213 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 213 insertions(+) create mode 100644 lib/jaraco/context.py diff --git a/lib/jaraco/context.py b/lib/jaraco/context.py new file mode 100644 index 00000000..87a4e3dc --- /dev/null +++ b/lib/jaraco/context.py @@ -0,0 +1,213 @@ +import os +import subprocess +import contextlib +import functools +import tempfile +import shutil +import operator + + +@contextlib.contextmanager +def pushd(dir): + orig = os.getcwd() + os.chdir(dir) + try: + yield dir + finally: + os.chdir(orig) + + +@contextlib.contextmanager +def tarball_context(url, target_dir=None, runner=None, pushd=pushd): + """ + Get a tarball, extract it, change to that directory, yield, then + clean up. + `runner` is the function to invoke commands. + `pushd` is a context manager for changing the directory. + """ + if target_dir is None: + target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '') + if runner is None: + runner = functools.partial(subprocess.check_call, shell=True) + # In the tar command, use --strip-components=1 to strip the first path and + # then + # use -C to cause the files to be extracted to {target_dir}. 
This ensures + # that we always know where the files were extracted. + runner('mkdir {target_dir}'.format(**vars())) + try: + getter = 'wget {url} -O -' + extract = 'tar x{compression} --strip-components=1 -C {target_dir}' + cmd = ' | '.join((getter, extract)) + runner(cmd.format(compression=infer_compression(url), **vars())) + with pushd(target_dir): + yield target_dir + finally: + runner('rm -Rf {target_dir}'.format(**vars())) + + +def infer_compression(url): + """ + Given a URL or filename, infer the compression code for tar. + """ + # cheat and just assume it's the last two characters + compression_indicator = url[-2:] + mapping = dict(gz='z', bz='j', xz='J') + # Assume 'z' (gzip) if no match + return mapping.get(compression_indicator, 'z') + + +@contextlib.contextmanager +def temp_dir(remover=shutil.rmtree): + """ + Create a temporary directory context. Pass a custom remover + to override the removal behavior. + """ + temp_dir = tempfile.mkdtemp() + try: + yield temp_dir + finally: + remover(temp_dir) + + +@contextlib.contextmanager +def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir): + """ + Check out the repo indicated by url. + + If dest_ctx is supplied, it should be a context manager + to yield the target directory for the check out. + """ + exe = 'git' if 'git' in url else 'hg' + with dest_ctx() as repo_dir: + cmd = [exe, 'clone', url, repo_dir] + if branch: + cmd.extend(['--branch', branch]) + devnull = open(os.path.devnull, 'w') + stdout = devnull if quiet else None + subprocess.check_call(cmd, stdout=stdout) + yield repo_dir + + +@contextlib.contextmanager +def null(): + yield + + +class ExceptionTrap: + """ + A context manager that will catch certain exceptions and provide an + indication they occurred. + + >>> with ExceptionTrap() as trap: + ... raise Exception() + >>> bool(trap) + True + + >>> with ExceptionTrap() as trap: + ... pass + >>> bool(trap) + False + + >>> with ExceptionTrap(ValueError) as trap: + ... raise ValueError("1 + 1 is not 3") + >>> bool(trap) + True + + >>> with ExceptionTrap(ValueError) as trap: + ... raise Exception() + Traceback (most recent call last): + ... + Exception + + >>> bool(trap) + False + """ + + exc_info = None, None, None + + def __init__(self, exceptions=(Exception,)): + self.exceptions = exceptions + + def __enter__(self): + return self + + @property + def type(self): + return self.exc_info[0] + + @property + def value(self): + return self.exc_info[1] + + @property + def tb(self): + return self.exc_info[2] + + def __exit__(self, *exc_info): + type = exc_info[0] + matches = type and issubclass(type, self.exceptions) + if matches: + self.exc_info = exc_info + return matches + + def __bool__(self): + return bool(self.type) + + def raises(self, func, *, _test=bool): + """ + Wrap func and replace the result with the truth + value of the trap (True if an exception occurred). + + First, give the decorator an alias to support Python 3.8 + Syntax. + + >>> raises = ExceptionTrap(ValueError).raises + + Now decorate a function that always fails. + + >>> @raises + ... def fail(): + ... raise ValueError('failed') + >>> fail() + True + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + with ExceptionTrap(self.exceptions) as trap: + func(*args, **kwargs) + return _test(trap) + + return wrapper + + def passes(self, func): + """ + Wrap func and replace the result with the truth + value of the trap (True if no exception). + + First, give the decorator an alias to support Python 3.8 + Syntax. 
+ + >>> passes = ExceptionTrap(ValueError).passes + + Now decorate a function that always fails. + + >>> @passes + ... def fail(): + ... raise ValueError('failed') + + >>> fail() + False + """ + return self.raises(func, _test=operator.not_) + + +class suppress(contextlib.suppress, contextlib.ContextDecorator): + """ + A version of contextlib.suppress with decorator support. + + >>> @suppress(KeyError) + ... def key_error(): + ... {}[''] + >>> key_error() + """ From c54c811eec967e127b3e5eed980ca6e304541629 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 12 Apr 2022 18:39:30 -0700 Subject: [PATCH 067/743] Update jaraco.text==3.7.0 [skip ci] --- lib/jaraco/text/__init__.py | 105 ++++++++++++++++++++++++++++++------ 1 file changed, 88 insertions(+), 17 deletions(-) diff --git a/lib/jaraco/text/__init__.py b/lib/jaraco/text/__init__.py index 95e20b40..5f75519a 100644 --- a/lib/jaraco/text/__init__.py +++ b/lib/jaraco/text/__init__.py @@ -4,11 +4,12 @@ import textwrap import functools try: - from importlib import resources # type: ignore + from importlib.resources import files # type: ignore except ImportError: # pragma: nocover - import importlib_resources as resources # type: ignore + from importlib_resources import files # type: ignore from jaraco.functools import compose, method_cache +from jaraco.context import ExceptionTrap def substitution(old, new): @@ -109,7 +110,7 @@ class FoldedCase(str): return hash(self.lower()) def __contains__(self, other): - return super(FoldedCase, self).lower().__contains__(other.lower()) + return super().lower().__contains__(other.lower()) def in_(self, other): "Does self appear in other?" @@ -118,7 +119,7 @@ class FoldedCase(str): # cache lower since it's likely to be called frequently. @method_cache def lower(self): - return super(FoldedCase, self).lower() + return super().lower() def index(self, sub): return self.lower().index(sub.lower()) @@ -128,6 +129,11 @@ class FoldedCase(str): return pattern.split(self, maxsplit) +# Python 3.8 compatibility +_unicode_trap = ExceptionTrap(UnicodeDecodeError) + + +@_unicode_trap.passes def is_decodable(value): r""" Return True if the supplied value is decodable (using the default @@ -138,14 +144,7 @@ def is_decodable(value): >>> is_decodable(b'\x32') True """ - # TODO: This code could be expressed more consisely and directly - # with a jaraco.context.ExceptionTrap, but that adds an unfortunate - # long dependency tree, so for now, use boolean literals. - try: - value.decode() - except UnicodeDecodeError: - return False - return True + value.decode() def is_binary(value): @@ -225,7 +224,7 @@ def unwrap(s): return '\n'.join(cleaned) -lorem_ipsum = resources.read_text(__name__, 'Lorem ipsum.txt') # type: ignore +lorem_ipsum: str = files(__name__).joinpath('Lorem ipsum.txt').read_text() class Splitter(object): @@ -370,10 +369,6 @@ class WordSet(tuple): result = WordSet(result) return result - # for compatibility with Python 2 - def __getslice__(self, i, j): # pragma: nocover - return self.__getitem__(slice(i, j)) - @classmethod def parse(cls, identifier): matches = cls._pattern.finditer(identifier) @@ -527,3 +522,79 @@ def normalize_newlines(text): newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029'] pattern = '|'.join(newlines) return re.sub(pattern, '\n', text) + + +def _nonblank(str): + return str and not str.startswith('#') + + +@functools.singledispatch +def yield_lines(iterable): + r""" + Yield valid lines of a string or iterable. 
+ + >>> list(yield_lines('')) + [] + >>> list(yield_lines(['foo', 'bar'])) + ['foo', 'bar'] + >>> list(yield_lines('foo\nbar')) + ['foo', 'bar'] + >>> list(yield_lines('\nfoo\n#bar\nbaz #comment')) + ['foo', 'baz #comment'] + >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n'])) + ['foo', 'bar', 'baz', 'bing'] + """ + return itertools.chain.from_iterable(map(yield_lines, iterable)) + + +@yield_lines.register(str) +def _(text): + return filter(_nonblank, map(str.strip, text.splitlines())) + + +def drop_comment(line): + """ + Drop comments. + + >>> drop_comment('foo # bar') + 'foo' + + A hash without a space may be in a URL. + + >>> drop_comment('http://example.com/foo#bar') + 'http://example.com/foo#bar' + """ + return line.partition(' #')[0] + + +def join_continuation(lines): + r""" + Join lines continued by a trailing backslash. + + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar \\', 'baz'])) + ['foobarbaz'] + + Not sure why, but... + The character preceeding the backslash is also elided. + + >>> list(join_continuation(['goo\\', 'dly'])) + ['godly'] + + A terrible idea, but... + If no line is available to continue, suppress the lines. + + >>> list(join_continuation(['foo', 'bar\\', 'baz\\'])) + ['foo'] + """ + lines = iter(lines) + for item in lines: + while item.endswith('\\'): + try: + item = item[:-2].strip() + next(lines) + except StopIteration: + return + yield item From fa52d99691e9c85a00f0d508b36deb92534449b6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Apr 2022 18:42:48 -0700 Subject: [PATCH 068/743] Bump importlib-resources from 5.4.0 to 5.6.0 (#1699) * Bump importlib-resources from 5.4.0 to 5.6.0 Bumps [importlib-resources](https://github.com/python/importlib_resources) from 5.4.0 to 5.6.0. - [Release notes](https://github.com/python/importlib_resources/releases) - [Changelog](https://github.com/python/importlib_resources/blob/main/CHANGES.rst) - [Commits](https://github.com/python/importlib_resources/compare/v5.4.0...v5.6.0) --- updated-dependencies: - dependency-name: importlib-resources dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update importlib-resources==5.6.0 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/importlib_resources/_compat.py | 10 +++++++ lib/importlib_resources/abc.py | 28 +++++++++++--------- lib/importlib_resources/tests/update-zips.py | 2 +- package/requirements-package.txt | 2 +- requirements.txt | 2 +- 5 files changed, 29 insertions(+), 15 deletions(-) diff --git a/lib/importlib_resources/_compat.py b/lib/importlib_resources/_compat.py index 61e48d47..8d7ade08 100644 --- a/lib/importlib_resources/_compat.py +++ b/lib/importlib_resources/_compat.py @@ -1,9 +1,12 @@ # flake8: noqa import abc +import os import sys import pathlib from contextlib import suppress +from typing import Union + if sys.version_info >= (3, 10): from zipfile import Path as ZipPath # type: ignore @@ -96,3 +99,10 @@ def wrap_spec(package): from . 
import _adapters return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) + + +if sys.version_info >= (3, 9): + StrPath = Union[str, os.PathLike[str]] +else: + # PathLike is only subscriptable at runtime in 3.9+ + StrPath = Union[str, "os.PathLike[str]"] diff --git a/lib/importlib_resources/abc.py b/lib/importlib_resources/abc.py index d39dc1ad..a2b0af62 100644 --- a/lib/importlib_resources/abc.py +++ b/lib/importlib_resources/abc.py @@ -1,7 +1,11 @@ import abc -from typing import BinaryIO, Iterable, Text +import io +from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional -from ._compat import runtime_checkable, Protocol +from ._compat import runtime_checkable, Protocol, StrPath + + +__all__ = ["ResourceReader", "Traversable", "TraversableResources"] class ResourceReader(metaclass=abc.ABCMeta): @@ -54,19 +58,19 @@ class Traversable(Protocol): """ @abc.abstractmethod - def iterdir(self): + def iterdir(self) -> Iterator["Traversable"]: """ Yield Traversable objects in self """ - def read_bytes(self): + def read_bytes(self) -> bytes: """ Read contents of self as bytes """ with self.open('rb') as strm: return strm.read() - def read_text(self, encoding=None): + def read_text(self, encoding: Optional[str] = None) -> str: """ Read contents of self as text """ @@ -86,12 +90,12 @@ class Traversable(Protocol): """ @abc.abstractmethod - def joinpath(self, child): + def joinpath(self, child: StrPath) -> "Traversable": """ Return Traversable child in self """ - def __truediv__(self, child): + def __truediv__(self, child: StrPath) -> "Traversable": """ Return Traversable child in self """ @@ -121,17 +125,17 @@ class TraversableResources(ResourceReader): """ @abc.abstractmethod - def files(self): + def files(self) -> "Traversable": """Return a Traversable object for the loaded package.""" - def open_resource(self, resource): + def open_resource(self, resource: StrPath) -> io.BufferedReader: return self.files().joinpath(resource).open('rb') - def resource_path(self, resource): + def resource_path(self, resource: Any) -> NoReturn: raise FileNotFoundError(resource) - def is_resource(self, path): + def is_resource(self, path: StrPath) -> bool: return self.files().joinpath(path).is_file() - def contents(self): + def contents(self) -> Iterator[str]: return (item.name for item in self.files().iterdir()) diff --git a/lib/importlib_resources/tests/update-zips.py b/lib/importlib_resources/tests/update-zips.py index 9ef0224c..231334aa 100644 --- a/lib/importlib_resources/tests/update-zips.py +++ b/lib/importlib_resources/tests/update-zips.py @@ -42,7 +42,7 @@ def generate(suffix): def walk(datapath): for dirpath, dirnames, filenames in os.walk(datapath): - with contextlib.suppress(KeyError): + with contextlib.suppress(ValueError): dirnames.remove('__pycache__') for filename in filenames: res = pathlib.Path(dirpath) / filename diff --git a/package/requirements-package.txt b/package/requirements-package.txt index 0a291e78..c7d7245e 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -1,5 +1,5 @@ apscheduler==3.8.0 -importlib-resources==5.4.0 +importlib-resources==5.6.0 pyinstaller==4.9 pyopenssl==22.0.0 pycryptodomex==3.14.1 diff --git a/requirements.txt b/requirements.txt index 9c3cc11a..c4fe538f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,7 +18,7 @@ gntp==1.0.3 html5lib==1.1 httpagentparser==1.9.2 idna==3.3 -importlib-resources==5.4.0 +importlib-resources==5.6.0 
git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois IPy==1.01 Mako==1.1.6 From dd59a79005fe0e5e2e6592074503af319374fa62 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Apr 2022 18:42:59 -0700 Subject: [PATCH 069/743] Bump zipp from 3.7.0 to 3.8.0 (#1706) * Bump zipp from 3.7.0 to 3.8.0 Bumps [zipp](https://github.com/jaraco/zipp) from 3.7.0 to 3.8.0. - [Release notes](https://github.com/jaraco/zipp/releases) - [Changelog](https://github.com/jaraco/zipp/blob/main/CHANGES.rst) - [Commits](https://github.com/jaraco/zipp/compare/v3.7.0...v3.8.0) --- updated-dependencies: - dependency-name: zipp dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update zipp==3.8.0 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/zipp.py | 23 +++-------------------- requirements.txt | 2 +- 2 files changed, 4 insertions(+), 21 deletions(-) diff --git a/lib/zipp.py b/lib/zipp.py index 26b723c1..52c82a0e 100644 --- a/lib/zipp.py +++ b/lib/zipp.py @@ -3,14 +3,8 @@ import posixpath import zipfile import itertools import contextlib -import sys import pathlib -if sys.version_info < (3, 7): - from collections import OrderedDict -else: - OrderedDict = dict - __all__ = ['Path'] @@ -56,7 +50,7 @@ def _ancestry(path): path, tail = posixpath.split(path) -_dedupe = OrderedDict.fromkeys +_dedupe = dict.fromkeys """Deduplicate an iterable in original order""" @@ -107,7 +101,7 @@ class CompleteDirs(zipfile.ZipFile): return source if not isinstance(source, zipfile.ZipFile): - return cls(_pathlib_compat(source)) + return cls(source) # Only allow for FastLookup when supplied zipfile is read-only if 'r' not in source.mode: @@ -136,17 +130,6 @@ class FastLookup(CompleteDirs): return self.__lookup -def _pathlib_compat(path): - """ - For path-like objects, convert to a filename for compatibility - on Python 3.6.1 and earlier. - """ - try: - return path.__fspath__() - except AttributeError: - return str(path) - - class Path: """ A pathlib-compatible interface for zip files. 
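For context on the zipp hunks here: zipp.Path layers a pathlib-style interface over a ZipFile, and after this patch joinpath hands its segments straight to posixpath.join (the Python 3.6 _pathlib_compat shim is gone). A small usage sketch, assuming only the stdlib plus the vendored zipp:

    import io
    import zipfile
    import zipp

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as zf:
        zf.writestr('a/b.txt', 'hello')

    root = zipp.Path(buf)                      # opens the archive read-only
    print((root / 'a' / 'b.txt').read_text())  # hello
    print([p.name for p in root.iterdir()])    # ['a']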
@@ -314,7 +297,7 @@ class Path: return self.__repr.format(self=self) def joinpath(self, *other): - next = posixpath.join(self.at, *map(_pathlib_compat, other)) + next = posixpath.join(self.at, *other) return self._next(self.root.resolve_dir(next)) __truediv__ = joinpath diff --git a/requirements.txt b/requirements.txt index c4fe538f..0810ddc5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -48,7 +48,7 @@ urllib3==1.26.8 webencodings==0.5.1 websocket-client==1.2.3 xmltodict==0.12.0 -zipp==3.7.0 +zipp==3.8.0 # configobj==5.1.0 # sgmllib3k==1.0.0 From 1371cfa889dad3c5fbe2f16fcd852403d416d838 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 14 Apr 2022 20:54:07 -0700 Subject: [PATCH 070/743] Trigger on_stop notification after successful db write * Prevent race condition from querying the db using the notification trigger --- plexpy/activity_handler.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py index a45a2cd4..07d0f8e3 100644 --- a/plexpy/activity_handler.py +++ b/plexpy/activity_handler.py @@ -164,13 +164,13 @@ class ActivityHandler(object): # Retrieve the session data from our temp table db_session = ap.get_session_by_key(session_key=self.get_session_key()) - plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'}) - # Write it to the history table monitor_proc = activity_processor.ActivityProcessor() row_id = monitor_proc.write_session_history(session=db_session) if row_id: + plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'}) + schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True) # Remove the session from our temp session table @@ -604,6 +604,8 @@ def force_stop_stream(session_key, title, user): row_id = ap.write_session_history(session=session) if row_id: + plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_stop'}) + # If session is written to the database successfully, remove the session from the session table logger.info("Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue" % (session['session_key'], session['rating_key'])) From df54df5e65ac6c2f133bd3ee15bc575bb46d64d6 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 26 Apr 2022 11:02:07 -0700 Subject: [PATCH 071/743] Set PlexAPI log level to debug --- plexpy/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index a7313b49..de453c0a 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -1,4 +1,4 @@ -# This file is part of Tautulli. +# This file is part of Tautulli. 
# # Tautulli is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -217,6 +217,7 @@ def initialize(config_file): if not PYTHON2: os.environ['PLEXAPI_CONFIG_PATH'] = os.path.join(DATA_DIR, 'plexapi.config.ini') os.environ['PLEXAPI_LOG_PATH'] = os.path.join(CONFIG.LOG_DIR, 'plexapi.log') + os.environ['PLEXAPI_LOG_LEVEL'] = 'DEBUG' plex.initialize_plexapi() if DOCKER: From 1f7ddd4d0193c3097b4422880f0a985722fa3505 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 12 May 2022 13:08:29 -0700 Subject: [PATCH 072/743] Disable PlexAPI auto-reloading when fetching collections/playlists --- plexpy/libraries.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plexpy/libraries.py b/plexpy/libraries.py index 0b0f16bb..8a51eec1 100644 --- a/plexpy/libraries.py +++ b/plexpy/libraries.py @@ -155,6 +155,8 @@ def get_collections(section_id=None): collections_list = [] for collection in collections: + collection._autoReload = False + collection_mode = collection.collectionMode if collection_mode is None: collection_mode = -1 @@ -254,6 +256,8 @@ def get_playlists(section_id=None, user_id=None): playlists_list = [] for playlist in playlists: + playlist._autoReload = False + playlist_dict = { 'addedAt': helpers.datetime_to_iso(playlist.addedAt), 'composite': playlist.composite, From f08687b960b2e0f302655c50268cc2f54b6d207d Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 12 May 2022 13:10:57 -0700 Subject: [PATCH 073/743] Fix Dolby Vision detection * Plex renamed "Dolby Vision" to "DoVi" in the display title. --- plexpy/pmsconnect.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index a993f6af..de1f88ef 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -3187,7 +3187,7 @@ class PmsConnect(object): # HDR details got introduced with PMS version 1.25.6.5545 if helpers.version_to_tuple(plexpy.CONFIG.PMS_VERSION) >= helpers.version_to_tuple('1.25.6.5545'): - if 'Dolby Vision' in extended_display_title: + if 'Dolby Vision' in extended_display_title or 'DoVi' in extended_display_title: video_dynamic_range.append('Dolby Vision') if 'HLG' in extended_display_title: video_dynamic_range.append('HLG') From 942e09e59eea782525057c65428d34701093c67e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 20:32:27 -0700 Subject: [PATCH 074/743] Bump distro from 1.6.0 to 1.7.0 (#1658) * Bump distro from 1.6.0 to 1.7.0 Bumps [distro](https://github.com/python-distro/distro) from 1.6.0 to 1.7.0. - [Release notes](https://github.com/python-distro/distro/releases) - [Changelog](https://github.com/python-distro/distro/blob/master/CHANGELOG.md) - [Commits](https://github.com/python-distro/distro/compare/v1.6.0...v1.7.0) --- updated-dependencies: - dependency-name: distro dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Update distro==1.7.0 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/distro/__init__.py | 54 ++ lib/distro/__main__.py | 4 + lib/distro/distro.py | 1374 ++++++++++++++++++++++++++++++++++++++++ lib/distro/py.typed | 0 requirements.txt | 2 +- 5 files changed, 1433 insertions(+), 1 deletion(-) create mode 100644 lib/distro/__init__.py create mode 100644 lib/distro/__main__.py create mode 100644 lib/distro/distro.py create mode 100644 lib/distro/py.typed diff --git a/lib/distro/__init__.py b/lib/distro/__init__.py new file mode 100644 index 00000000..7686fe85 --- /dev/null +++ b/lib/distro/__init__.py @@ -0,0 +1,54 @@ +from .distro import ( + NORMALIZED_DISTRO_ID, + NORMALIZED_LSB_ID, + NORMALIZED_OS_ID, + LinuxDistribution, + __version__, + build_number, + codename, + distro_release_attr, + distro_release_info, + id, + info, + like, + linux_distribution, + lsb_release_attr, + lsb_release_info, + major_version, + minor_version, + name, + os_release_attr, + os_release_info, + uname_attr, + uname_info, + version, + version_parts, +) + +__all__ = [ + "NORMALIZED_DISTRO_ID", + "NORMALIZED_LSB_ID", + "NORMALIZED_OS_ID", + "LinuxDistribution", + "build_number", + "codename", + "distro_release_attr", + "distro_release_info", + "id", + "info", + "like", + "linux_distribution", + "lsb_release_attr", + "lsb_release_info", + "major_version", + "minor_version", + "name", + "os_release_attr", + "os_release_info", + "uname_attr", + "uname_info", + "version", + "version_parts", +] + +__version__ = __version__ diff --git a/lib/distro/__main__.py b/lib/distro/__main__.py new file mode 100644 index 00000000..0c01d5b0 --- /dev/null +++ b/lib/distro/__main__.py @@ -0,0 +1,4 @@ +from .distro import main + +if __name__ == "__main__": + main() diff --git a/lib/distro/distro.py b/lib/distro/distro.py new file mode 100644 index 00000000..49066ae8 --- /dev/null +++ b/lib/distro/distro.py @@ -0,0 +1,1374 @@ +#!/usr/bin/env python +# Copyright 2015,2016,2017 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The ``distro`` package (``distro`` stands for Linux Distribution) provides +information about the Linux distribution it runs on, such as a reliable +machine-readable distro ID, or version information. + +It is the recommended replacement for Python's original +:py:func:`platform.linux_distribution` function, but it provides much more +functionality. An alternative implementation became necessary because Python +3.5 deprecated this function, and Python 3.8 removed it altogether. Its +predecessor function :py:func:`platform.dist` was already deprecated since +Python 2.6 and removed in Python 3.8. Still, there are many cases in which +access to OS distribution information is needed. See `Python issue 1322 +`_ for more information. 
+""" + +import argparse +import json +import logging +import os +import re +import shlex +import subprocess +import sys +import warnings +from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Sequence, + TextIO, + Tuple, + Type, +) + +try: + from typing import TypedDict +except ImportError: + # Python 3.7 + TypedDict = dict + +__version__ = "1.7.0" + + +class VersionDict(TypedDict): + major: str + minor: str + build_number: str + + +class InfoDict(TypedDict): + id: str + version: str + version_parts: VersionDict + like: str + codename: str + + +_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") +_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") +_OS_RELEASE_BASENAME = "os-release" + +#: Translation table for normalizing the "ID" attribute defined in os-release +#: files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as defined in the os-release file, translated to lower case, +#: with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_OS_ID = { + "ol": "oracle", # Oracle Linux + "opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap +} + +#: Translation table for normalizing the "Distributor ID" attribute returned by +#: the lsb_release command, for use by the :func:`distro.id` method. +#: +#: * Key: Value as returned by the lsb_release command, translated to lower +#: case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_LSB_ID = { + "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 + "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 + "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation + "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server + "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode +} + +#: Translation table for normalizing the distro ID derived from the file name +#: of distro release files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as derived from the file name of a distro release file, +#: translated to lower case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_DISTRO_ID = { + "redhat": "rhel", # RHEL 6.x, 7.x +} + +# Pattern for content of distro release file (reversed) +_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( + r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" +) + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + "debian_version", + "lsb-release", + "oem-release", + _OS_RELEASE_BASENAME, + "system-release", + "plesk-release", + "iredmail-release", +) + + +def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]: + """ + .. deprecated:: 1.6.0 + + :func:`distro.linux_distribution()` is deprecated. It should only be + used as a compatibility shim with Python's + :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, + :func:`distro.version` and :func:`distro.name` instead. + + Return information about the current OS distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. Otherwise, the result of :func:`distro.name`. + + * ``version``: The result of :func:`distro.version`. 
+ + * ``codename``: The extra item (usually in parentheses) after the + os-release version number, or the result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the OS distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular OS distributions. + """ + warnings.warn( + "distro.linux_distribution() is deprecated. It should only be used as a " + "compatibility shim with Python's platform.linux_distribution(). Please use " + "distro.id(), distro.version() and distro.name() instead.", + DeprecationWarning, + stacklevel=2, + ) + return _distro.linux_distribution(full_distribution_name) + + +def id() -> str: + """ + Return the distro ID of the current distribution, as a + machine-readable string. + + For a number of OS distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. + + This package maintains the following reliable distro ID values: + + ============== ========================================= + Distro ID Distribution + ============== ========================================= + "ubuntu" Ubuntu + "debian" Debian + "rhel" RedHat Enterprise Linux + "centos" CentOS + "fedora" Fedora + "sles" SUSE Linux Enterprise Server + "opensuse" openSUSE + "amzn" Amazon Linux + "arch" Arch Linux + "cloudlinux" CloudLinux OS + "exherbo" Exherbo Linux + "gentoo" GenToo Linux + "ibm_powerkvm" IBM PowerKVM + "kvmibm" KVM for IBM z Systems + "linuxmint" Linux Mint + "mageia" Mageia + "mandriva" Mandriva Linux + "parallels" Parallels + "pidora" Pidora + "raspbian" Raspbian + "oracle" Oracle Linux (and Oracle Enterprise Linux) + "scientific" Scientific Linux + "slackware" Slackware + "xenserver" XenServer + "openbsd" OpenBSD + "netbsd" NetBSD + "freebsd" FreeBSD + "midnightbsd" MidnightBSD + "rocky" Rocky Linux + "aix" AIX + ============== ========================================= + + If you have a need to get distros for reliable IDs added into this set, + or if you find that the :func:`distro.id` function returns a different + distro ID for one of the listed distros, please create an issue in the + `distro issue tracker`_. + + **Lookup hierarchy and transformations:** + + First, the ID is obtained from the following sources, in the specified + order. The first available and non-empty value is used: + + * the value of the "ID" attribute of the os-release file, + + * the value of the "Distributor ID" attribute returned by the lsb_release + command, + + * the first part of the file name of the distro release file, + + The so determined ID value then passes the following transformations, + before it is returned by this method: + + * it is translated to lower case, + + * blanks (which should not be there anyway) are translated to underscores, + + * a normalization of the ID is performed, based upon + `normalization tables`_. The purpose of this normalization is to ensure + that the ID is as reliable as possible, even across incompatible changes + in the OS distributions. 
A common reason for an incompatible change is
+      the addition of an os-release file, or the addition of the lsb_release
+      command, with ID values that differ from what was previously determined
+      from the distro release file name.
+    """
+    return _distro.id()
+
+
+def name(pretty: bool = False) -> str:
+    """
+    Return the name of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the name is returned without version or codename.
+    (e.g. "CentOS Linux")
+
+    If *pretty* is true, the version and codename are appended.
+    (e.g. "CentOS Linux 7.1.1503 (Core)")
+
+    **Lookup hierarchy:**
+
+    The name is obtained from the following sources, in the specified order.
+    The first available and non-empty value is used:
+
+    * If *pretty* is false:
+
+      - the value of the "NAME" attribute of the os-release file,
+
+      - the value of the "Distributor ID" attribute returned by the lsb_release
+        command,
+
+      - the value of the "<name>" field of the distro release file.
+
+    * If *pretty* is true:
+
+      - the value of the "PRETTY_NAME" attribute of the os-release file,
+
+      - the value of the "Description" attribute returned by the lsb_release
+        command,
+
+      - the value of the "<name>" field of the distro release file, appended
+        with the value of the pretty version ("<version_id>" and "<codename>"
+        fields) of the distro release file, if available.
+    """
+    return _distro.name(pretty)
+
+
+def version(pretty: bool = False, best: bool = False) -> str:
+    """
+    Return the version of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the version is returned without codename (e.g.
+    "7.0").
+
+    If *pretty* is true, the codename in parenthesis is appended, if the
+    codename is non-empty (e.g. "7.0 (Maipo)").
+
+    Some distributions provide version numbers with different precisions in
+    the different sources of distribution information. Examining the different
+    sources in a fixed priority order does not always yield the most precise
+    version (e.g. for Debian 8.2, or CentOS 7.1).
+
+    Some other distributions may not provide this kind of information. In these
+    cases, an empty string would be returned. This behavior can be observed
+    with rolling releases distributions (e.g. Arch Linux).
+
+    The *best* parameter can be used to control the approach for the returned
+    version:
+
+    If *best* is false, the first non-empty version number in priority order of
+    the examined sources is returned.
+
+    If *best* is true, the most precise version number out of all examined
+    sources is returned.
+
+    **Lookup hierarchy:**
+
+    In all cases, the version number is obtained from the following sources.
+    If *best* is false, this order represents the priority order:
+
+    * the value of the "VERSION_ID" attribute of the os-release file,
+    * the value of the "Release" attribute returned by the lsb_release
+      command,
+    * the version number parsed from the "<version_id>" field of the first line
+      of the distro release file,
+    * the version number parsed from the "PRETTY_NAME" attribute of the
+      os-release file, if it follows the format of the distro release files.
+    * the version number parsed from the "Description" attribute returned by
+      the lsb_release command, if it follows the format of the distro release
+      files.
+    """
+    return _distro.version(pretty, best)
+
+
+def version_parts(best: bool = False) -> Tuple[str, str, str]:
+    """
+    Return the version of the current OS distribution as a tuple
+    ``(major, minor, build_number)`` with items as follows:
+
+    * ``major``: The result of :func:`distro.major_version`.
+
+    * ``minor``: The result of :func:`distro.minor_version`.
+
+    * ``build_number``: The result of :func:`distro.build_number`.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.version_parts(best)
+
+
+def major_version(best: bool = False) -> str:
+    """
+    Return the major version of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The major version is the first
+    part of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.major_version(best)
+
+
+def minor_version(best: bool = False) -> str:
+    """
+    Return the minor version of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The minor version is the second
+    part of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.minor_version(best)
+
+
+def build_number(best: bool = False) -> str:
+    """
+    Return the build number of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The build number is the third part
+    of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.build_number(best)
+
+
+def like() -> str:
+    """
+    Return a space-separated list of distro IDs of distributions that are
+    closely related to the current OS distribution in regards to packaging
+    and programming interfaces, for example distributions the current
+    distribution is a derivative from.
+
+    **Lookup hierarchy:**
+
+    This information item is only provided by the os-release file.
+    For details, see the description of the "ID_LIKE" attribute in the
+    `os-release man page
+    <https://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+    """
+    return _distro.like()
+
+
+def codename() -> str:
+    """
+    Return the codename for the release of the current OS distribution,
+    as a string.
+
+    If the distribution does not have a codename, an empty string is returned.
+
+    Note that the returned codename is not always really a codename. For
+    example, openSUSE returns "x86_64". This function does not handle such
+    cases in any special way and just returns the string it finds, if any.
+
+    **Lookup hierarchy:**
+
+    * the codename within the "VERSION" attribute of the os-release file, if
+      provided,
+
+    * the value of the "Codename" attribute returned by the lsb_release
+      command,
+
+    * the value of the "<codename>" field of the distro release file.
+    """
+    return _distro.codename()
+
+
+def info(pretty: bool = False, best: bool = False) -> InfoDict:
+    """
+    Return certain machine-readable information items about the current OS
+    distribution in a dictionary, as shown in the following example:
+
+    .. sourcecode:: python
+
+        {
+            'id': 'rhel',
+            'version': '7.0',
+            'version_parts': {
+                'major': '7',
+                'minor': '0',
+                'build_number': ''
+            },
+            'like': 'fedora',
+            'codename': 'Maipo'
+        }
+
+    The dictionary structure and keys are always the same, regardless of which
+    information items are available in the underlying data sources. The values
+    for the various keys are as follows:
+
+    * ``id``: The result of :func:`distro.id`.
+
+    * ``version``: The result of :func:`distro.version`.
+
+    * ``version_parts -> major``: The result of :func:`distro.major_version`.
+
+    * ``version_parts -> minor``: The result of :func:`distro.minor_version`.
+ + * ``version_parts -> build_number``: The result of + :func:`distro.build_number`. + + * ``like``: The result of :func:`distro.like`. + + * ``codename``: The result of :func:`distro.codename`. + + For a description of the *pretty* and *best* parameters, see the + :func:`distro.version` method. + """ + return _distro.info(pretty, best) + + +def os_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the os-release file data source of the current OS distribution. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_info() + + +def lsb_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the lsb_release command data source of the current OS distribution. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_info() + + +def distro_release_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_info() + + +def uname_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + """ + return _distro.uname_info() + + +def os_release_attr(attribute: str) -> str: + """ + Return a single named information item from the os-release file data source + of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_attr(attribute) + + +def lsb_release_attr(attribute: str) -> str: + """ + Return a single named information item from the lsb_release command output + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_attr(attribute) + + +def distro_release_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_attr(attribute) + + +def uname_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. 
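+
+    For example, ``uname_attr('release')`` would typically return the kernel
+    release string parsed from ``uname -rs`` output (an illustrative call;
+    the value depends on the running system and may be empty).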
+ """ + return _distro.uname_attr(attribute) + + +try: + from functools import cached_property +except ImportError: + # Python < 3.8 + class cached_property: # type: ignore + """A version of @property which caches the value. On access, it calls the + underlying function and sets the value in `__dict__` so future accesses + will not re-call the property. + """ + + def __init__(self, f: Callable[[Any], Any]) -> None: + self._fname = f.__name__ + self._f = f + + def __get__(self, obj: Any, owner: Type[Any]) -> Any: + assert obj is not None, f"call {self._fname} on an instance" + ret = obj.__dict__[self._fname] = self._f(obj) + return ret + + +class LinuxDistribution: + """ + Provides information about a OS distribution. + + This package creates a private module-global instance of this class with + default initialization arguments, that is used by the + `consolidated accessor functions`_ and `single source accessor functions`_. + By using default initialization arguments, that module-global instance + returns data about the current OS distribution (i.e. the distro this + package runs on). + + Normally, it is not necessary to create additional instances of this class. + However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. + """ + + def __init__( + self, + include_lsb: Optional[bool] = None, + os_release_file: str = "", + distro_release_file: str = "", + include_uname: Optional[bool] = None, + root_dir: Optional[str] = None, + include_oslevel: Optional[bool] = None, + ) -> None: + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). + + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + * ``include_uname`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + + * ``root_dir`` (string): The absolute path to the root directory to use + to find distro-related information files. Note that ``include_*`` + parameters must not be enabled in combination with ``root_dir``. 
+ + * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command + output is included as a data source. If the oslevel command is not + available in the program execution path the data source will be + empty. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. + This controls whether the lsb information will be loaded. + + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. + + * ``include_oslevel`` (bool): The result of the ``include_oslevel`` + parameter. This controls whether (AIX) oslevel information will be + loaded. + + * ``root_dir`` (string): The result of the ``root_dir`` parameter. + The absolute path to the root directory to use to find distro-related + information files. + + Raises: + + * :py:exc:`ValueError`: Initialization parameters combination is not + supported. + + * :py:exc:`OSError`: Some I/O issue with an os-release file or distro + release file. + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. + """ + self.root_dir = root_dir + self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR + self.usr_lib_dir = ( + os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR + ) + + if os_release_file: + self.os_release_file = os_release_file + else: + etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) + usr_lib_os_release_file = os.path.join( + self.usr_lib_dir, _OS_RELEASE_BASENAME + ) + + # NOTE: The idea is to respect order **and** have it set + # at all times for API backwards compatibility. 
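+            # Concretely: the /etc file wins whenever it exists; the
+            # /usr/lib file is used only when it exists and the /etc file
+            # does not; and if neither exists, the /etc path is still
+            # recorded so the attribute always holds a value.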
+ if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( + usr_lib_os_release_file + ): + self.os_release_file = etc_dir_os_release_file + else: + self.os_release_file = usr_lib_os_release_file + + self.distro_release_file = distro_release_file or "" # updated later + + is_root_dir_defined = root_dir is not None + if is_root_dir_defined and (include_lsb or include_uname or include_oslevel): + raise ValueError( + "Including subprocess data sources from specific root_dir is disallowed" + " to prevent false information" + ) + self.include_lsb = ( + include_lsb if include_lsb is not None else not is_root_dir_defined + ) + self.include_uname = ( + include_uname if include_uname is not None else not is_root_dir_defined + ) + self.include_oslevel = ( + include_oslevel if include_oslevel is not None else not is_root_dir_defined + ) + + def __repr__(self) -> str: + """Return repr of all info""" + return ( + "LinuxDistribution(" + "os_release_file={self.os_release_file!r}, " + "distro_release_file={self.distro_release_file!r}, " + "include_lsb={self.include_lsb!r}, " + "include_uname={self.include_uname!r}, " + "include_oslevel={self.include_oslevel!r}, " + "root_dir={self.root_dir!r}, " + "_os_release_info={self._os_release_info!r}, " + "_lsb_release_info={self._lsb_release_info!r}, " + "_distro_release_info={self._distro_release_info!r}, " + "_uname_info={self._uname_info!r}, " + "_oslevel_info={self._oslevel_info!r})".format(self=self) + ) + + def linux_distribution( + self, full_distribution_name: bool = True + ) -> Tuple[str, str, str]: + """ + Return information about the OS distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self._os_release_info.get("release_codename") or self.codename(), + ) + + def id(self) -> str: + """Return the distro ID of the OS distribution, as a string. + + For details, see :func:`distro.id`. + """ + + def normalize(distro_id: str, table: Dict[str, str]) -> str: + distro_id = distro_id.lower().replace(" ", "_") + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr("distributor_id") + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + distro_id = self.uname_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return "" + + def name(self, pretty: bool = False) -> str: + """ + Return the name of the OS distribution, as a string. + + For details, see :func:`distro.name`. + """ + name = ( + self.os_release_attr("name") + or self.lsb_release_attr("distributor_id") + or self.distro_release_attr("name") + or self.uname_attr("name") + ) + if pretty: + name = self.os_release_attr("pretty_name") or self.lsb_release_attr( + "description" + ) + if not name: + name = self.distro_release_attr("name") or self.uname_attr("name") + version = self.version(pretty=True) + if version: + name = f"{name} {version}" + return name or "" + + def version(self, pretty: bool = False, best: bool = False) -> str: + """ + Return the version of the OS distribution, as a string. + + For details, see :func:`distro.version`. 
+ """ + versions = [ + self.os_release_attr("version_id"), + self.lsb_release_attr("release"), + self.distro_release_attr("version_id"), + self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( + "version_id", "" + ), + self._parse_distro_release_content( + self.lsb_release_attr("description") + ).get("version_id", ""), + self.uname_attr("release"), + ] + if self.uname_attr("id").startswith("aix"): + # On AIX platforms, prefer oslevel command output. + versions.insert(0, self.oslevel_info()) + version = "" + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. + for v in versions: + if v.count(".") > version.count(".") or version == "": + version = v + else: + for v in versions: + if v != "": + version = v + break + if pretty and version and self.codename(): + version = f"{version} ({self.codename()})" + return version + + def version_parts(self, best: bool = False) -> Tuple[str, str, str]: + """ + Return the version of the OS distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or "", build_number or "" + return "", "", "" + + def major_version(self, best: bool = False) -> str: + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best: bool = False) -> str: + """ + Return the minor version number of the current distribution. + + For details, see :func:`distro.minor_version`. + """ + return self.version_parts(best)[1] + + def build_number(self, best: bool = False) -> str: + """ + Return the build number of the current distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self) -> str: + """ + Return the IDs of distributions that are like the OS distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr("id_like") or "" + + def codename(self) -> str: + """ + Return the codename of the OS distribution. + + For details, see :func:`distro.codename`. + """ + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info["codename"] + except KeyError: + return ( + self.lsb_release_attr("codename") + or self.distro_release_attr("codename") + or "" + ) + + def info(self, pretty: bool = False, best: bool = False) -> InfoDict: + """ + Return certain machine-readable information about the OS + distribution. + + For details, see :func:`distro.info`. + """ + return dict( + id=self.id(), + version=self.version(pretty, best), + version_parts=dict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best), + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the OS distribution. + + For details, see :func:`distro.os_release_info`. 
+ """ + return self._os_release_info + + def lsb_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the lsb_release command data source of the OS + distribution. + + For details, see :func:`distro.lsb_release_info`. + """ + return self._lsb_release_info + + def distro_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the distro release file data source of the OS + distribution. + + For details, see :func:`distro.distro_release_info`. + """ + return self._distro_release_info + + def uname_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the uname command data source of the OS distribution. + + For details, see :func:`distro.uname_info`. + """ + return self._uname_info + + def oslevel_info(self) -> str: + """ + Return AIX' oslevel command output. + """ + return self._oslevel_info + + def os_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the os-release file data + source of the OS distribution. + + For details, see :func:`distro.os_release_attr`. + """ + return self._os_release_info.get(attribute, "") + + def lsb_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the lsb_release command + output data source of the OS distribution. + + For details, see :func:`distro.lsb_release_attr`. + """ + return self._lsb_release_info.get(attribute, "") + + def distro_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the OS distribution. + + For details, see :func:`distro.distro_release_attr`. + """ + return self._distro_release_info.get(attribute, "") + + def uname_attr(self, attribute: str) -> str: + """ + Return a single named information item from the uname command + output data source of the OS distribution. + + For details, see :func:`distro.uname_attr`. + """ + return self._uname_info.get(attribute, "") + + @cached_property + def _os_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified os-release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(self.os_release_file): + with open(self.os_release_file, encoding="utf-8") as release_file: + return self._parse_os_release_content(release_file) + return {} + + @staticmethod + def _parse_os_release_content(lines: TextIO) -> Dict[str, str]: + """ + Parse the lines of an os-release file. + + Parameters: + + * lines: Iterable through the lines in the os-release file. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + lexer = shlex.shlex(lines, posix=True) + lexer.whitespace_split = True + + tokens = list(lexer) + for token in tokens: + # At this point, all shell-like parsing has been done (i.e. 
+ # comments processed, quotes and backslash escape sequences + # processed, multi-line values assembled, trailing newlines + # stripped, etc.), so the tokens are now either: + # * variable assignments: var=value + # * commands or their arguments (not allowed in os-release) + # Ignore any tokens that are not variable assignments + if "=" in token: + k, v = token.split("=", 1) + props[k.lower()] = v + + if "version" in props: + # extract release codename (if any) from version attribute + match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"]) + if match: + release_codename = match.group(1) or match.group(2) + props["codename"] = props["release_codename"] = release_codename + + if "version_codename" in props: + # os-release added a version_codename field. Use that in + # preference to anything else Note that some distros purposefully + # do not have code names. They should be setting + # version_codename="" + props["codename"] = props["version_codename"] + elif "ubuntu_codename" in props: + # Same as above but a non-standard field name used on older Ubuntus + props["codename"] = props["ubuntu_codename"] + + return props + + @cached_property + def _lsb_release_info(self) -> Dict[str, str]: + """ + Get the information items from the lsb_release command output. + + Returns: + A dictionary containing all information items. + """ + if not self.include_lsb: + return {} + try: + cmd = ("lsb_release", "-a") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + # Command not found or lsb_release returned error + except (OSError, subprocess.CalledProcessError): + return {} + content = self._to_str(stdout).splitlines() + return self._parse_lsb_release_content(content) + + @staticmethod + def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]: + """ + Parse the output of the lsb_release command. + + Parameters: + + * lines: Iterable through the lines of the lsb_release output. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + for line in lines: + kv = line.strip("\n").split(":", 1) + if len(kv) != 2: + # Ignore lines without colon. + continue + k, v = kv + props.update({k.replace(" ", "_").lower(): v.strip()}) + return props + + @cached_property + def _uname_info(self) -> Dict[str, str]: + if not self.include_uname: + return {} + try: + cmd = ("uname", "-rs") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + except OSError: + return {} + content = self._to_str(stdout).splitlines() + return self._parse_uname_content(content) + + @cached_property + def _oslevel_info(self) -> str: + if not self.include_oslevel: + return "" + try: + stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL) + except (OSError, subprocess.CalledProcessError): + return "" + return self._to_str(stdout).strip() + + @staticmethod + def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: + if not lines: + return {} + props = {} + match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) + if match: + name, version = match.groups() + + # This is to prevent the Linux kernel version from + # appearing as the 'best' version on otherwise + # identifiable distributions. 
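+            # For example, `uname -rs` on a typical Ubuntu host prints
+            # something like "Linux 5.15.0-41-generic"; keeping that entry
+            # would shadow the os-release data, so generic "Linux" is
+            # discarded here.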
+ if name == "Linux": + return {} + props["id"] = name.lower() + props["name"] = name + props["release"] = version + return props + + @staticmethod + def _to_str(bytestring: bytes) -> str: + encoding = sys.getfilesystemencoding() + return bytestring.decode(encoding) + + @cached_property + def _distro_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file(self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if "name" in distro_info and "cloudlinux" in distro_info["name"].lower(): + distro_info["id"] = "cloudlinux" + elif match: + distro_info["id"] = match.group(1) + return distro_info + else: + try: + basenames = os.listdir(self.etc_dir) + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + except OSError: + # This may occur when /etc is not readable but we can't be + # sure about the *-release files. Check common entries of + # /etc for information. If they turn out to not be there the + # error is handled in `_parse_distro_release_file()`. + basenames = [ + "SuSE-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "rocky-release", + "sl-release", + "slackware-version", + ] + for basename in basenames: + if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: + continue + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match: + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + if "name" in distro_info: + # The name is always present if the pattern matches + self.distro_release_file = filepath + distro_info["id"] = match.group(1) + if "cloudlinux" in distro_info["name"].lower(): + distro_info["id"] = "cloudlinux" + return distro_info + return {} + + def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + try: + with open(filepath, encoding="utf-8") as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. We don't want them... + return self._parse_distro_release_content(fp.readline()) + except OSError: + # Ignore not being able to read a specific, seemingly version + # related file. + # See https://github.com/python-distro/distro/issues/162 + return {} + + @staticmethod + def _parse_distro_release_content(line: str) -> Dict[str, str]: + """ + Parse a line from a distro release file. + + Parameters: + * line: Line from the distro release file. Must be a unicode string + or a UTF-8 encoded byte string. 
+ + Returns: + A dictionary containing all information items. + """ + matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) + distro_info = {} + if matches: + # regexp ensures non-None + distro_info["name"] = matches.group(3)[::-1] + if matches.group(2): + distro_info["version_id"] = matches.group(2)[::-1] + if matches.group(1): + distro_info["codename"] = matches.group(1)[::-1] + elif line: + distro_info["name"] = line.strip() + return distro_info + + +_distro = LinuxDistribution() + + +def main() -> None: + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.StreamHandler(sys.stdout)) + + parser = argparse.ArgumentParser(description="OS distro info tool") + parser.add_argument( + "--json", "-j", help="Output in machine readable format", action="store_true" + ) + + parser.add_argument( + "--root-dir", + "-r", + type=str, + dest="root_dir", + help="Path to the root filesystem directory (defaults to /)", + ) + + args = parser.parse_args() + + if args.root_dir: + dist = LinuxDistribution( + include_lsb=False, + include_uname=False, + include_oslevel=False, + root_dir=args.root_dir, + ) + else: + dist = _distro + + if args.json: + logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) + else: + logger.info("Name: %s", dist.name(pretty=True)) + distribution_version = dist.version(pretty=True) + logger.info("Version: %s", distribution_version) + distribution_codename = dist.codename() + logger.info("Codename: %s", distribution_codename) + + +if __name__ == "__main__": + main() diff --git a/lib/distro/py.typed b/lib/distro/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/requirements.txt b/requirements.txt index 0810ddc5..d433889a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ certifi==2021.10.8 cheroot==8.6.0 cherrypy==18.6.1 cloudinary==1.29.0 -distro==1.6.0 +distro==1.7.0 dnspython==2.2.0 facebook-sdk==3.1.0 future==0.18.2 From 54c9214b039ae929e00d8b47287e92e6f7c065db Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 20:32:37 -0700 Subject: [PATCH 075/743] Bump apscheduler from 3.8.0 to 3.9.1 (#1675) * Bump apscheduler from 3.8.0 to 3.9.1 Bumps [apscheduler](https://github.com/agronholm/apscheduler) from 3.8.0 to 3.9.1. - [Release notes](https://github.com/agronholm/apscheduler/releases) - [Changelog](https://github.com/agronholm/apscheduler/blob/3.9.1/docs/versionhistory.rst) - [Commits](https://github.com/agronholm/apscheduler/compare/3.8.0...3.9.1) --- updated-dependencies: - dependency-name: apscheduler dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Update apscheduler==3.9.1 * Update pytz==2022.1 * Add pytz-deprecation-shim==0.1.0.post0 * Update tzdata==2022.1 * Update tzlocal==4.2 * Update requirements.txt Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/apscheduler/jobstores/mongodb.py | 4 +- lib/apscheduler/schedulers/base.py | 5 +- lib/apscheduler/schedulers/qt.py | 11 +- lib/apscheduler/triggers/cron/__init__.py | 7 +- lib/apscheduler/triggers/interval.py | 6 +- lib/apscheduler/util.py | 23 +- lib/pytz/__init__.py | 4 +- lib/pytz/zoneinfo/America/Punta_Arenas | Bin 1902 -> 1902 bytes lib/pytz/zoneinfo/America/Santiago | Bin 2529 -> 2529 bytes lib/pytz/zoneinfo/Asia/Gaza | Bin 2422 -> 2422 bytes lib/pytz/zoneinfo/Asia/Hebron | Bin 2450 -> 2450 bytes lib/pytz/zoneinfo/Chile/Continental | Bin 2529 -> 2529 bytes lib/pytz/zoneinfo/Europe/Kiev | Bin 2088 -> 2120 bytes lib/pytz/zoneinfo/Europe/Simferopol | Bin 1453 -> 1469 bytes lib/pytz/zoneinfo/Europe/Uzhgorod | Bin 2050 -> 2066 bytes lib/pytz/zoneinfo/Europe/Zaporozhye | Bin 2106 -> 2138 bytes lib/pytz/zoneinfo/Pacific/Fiji | Bin 1077 -> 1049 bytes lib/pytz/zoneinfo/leapseconds | 8 +- lib/pytz/zoneinfo/tzdata.zi | 38 +-- lib/pytz_deprecation_shim/__init__.py | 34 +++ lib/pytz_deprecation_shim/_common.py | 13 + lib/pytz_deprecation_shim/_compat.py | 15 ++ lib/pytz_deprecation_shim/_compat_py2.py | 43 ++++ lib/pytz_deprecation_shim/_compat_py3.py | 58 +++++ lib/pytz_deprecation_shim/_exceptions.py | 75 ++++++ lib/pytz_deprecation_shim/_impl.py | 296 ++++++++++++++++++++++ lib/pytz_deprecation_shim/helpers.py | 90 +++++++ lib/tzdata/__init__.py | 4 +- lib/tzdata/zoneinfo/America/Punta_Arenas | Bin 1209 -> 1209 bytes lib/tzdata/zoneinfo/America/Santiago | Bin 1282 -> 1282 bytes lib/tzdata/zoneinfo/Asia/Gaza | Bin 1230 -> 1240 bytes lib/tzdata/zoneinfo/Asia/Hebron | Bin 1248 -> 1258 bytes lib/tzdata/zoneinfo/Chile/Continental | Bin 1282 -> 1282 bytes lib/tzdata/zoneinfo/Europe/Kiev | Bin 549 -> 558 bytes lib/tzdata/zoneinfo/Europe/Simferopol | Bin 865 -> 865 bytes lib/tzdata/zoneinfo/Europe/Uzhgorod | Bin 530 -> 539 bytes lib/tzdata/zoneinfo/Europe/Zaporozhye | Bin 560 -> 569 bytes lib/tzdata/zoneinfo/leapseconds | 8 +- lib/tzdata/zoneinfo/tzdata.zi | 34 +-- lib/tzlocal/__init__.py | 14 +- lib/tzlocal/unix.py | 219 +++++++++------- lib/tzlocal/utils.py | 94 ++++++- lib/tzlocal/win32.py | 119 +++++---- lib/tzlocal/windows_tz.py | 20 +- package/requirements-package.txt | 2 +- requirements.txt | 8 +- 46 files changed, 1029 insertions(+), 223 deletions(-) create mode 100644 lib/pytz_deprecation_shim/__init__.py create mode 100644 lib/pytz_deprecation_shim/_common.py create mode 100644 lib/pytz_deprecation_shim/_compat.py create mode 100644 lib/pytz_deprecation_shim/_compat_py2.py create mode 100644 lib/pytz_deprecation_shim/_compat_py3.py create mode 100644 lib/pytz_deprecation_shim/_exceptions.py create mode 100644 lib/pytz_deprecation_shim/_impl.py create mode 100644 lib/pytz_deprecation_shim/helpers.py diff --git a/lib/apscheduler/jobstores/mongodb.py b/lib/apscheduler/jobstores/mongodb.py index ea3097dd..5a00f941 100644 --- a/lib/apscheduler/jobstores/mongodb.py +++ b/lib/apscheduler/jobstores/mongodb.py @@ -106,7 +106,7 @@ class MongoDBJobStore(BaseJobStore): raise JobLookupError(job_id) def remove_all_jobs(self): - self.collection.remove() + self.collection.delete_many({}) def shutdown(self): self.client.close() @@ -133,7 +133,7 
@@ class MongoDBJobStore(BaseJobStore): # Remove all the jobs we failed to restore if failed_job_ids: - self.collection.remove({'_id': {'$in': failed_job_ids}}) + self.collection.delete_many({'_id': {'$in': failed_job_ids}}) return jobs diff --git a/lib/apscheduler/schedulers/base.py b/lib/apscheduler/schedulers/base.py index 3dfb7437..444de8ef 100644 --- a/lib/apscheduler/schedulers/base.py +++ b/lib/apscheduler/schedulers/base.py @@ -191,12 +191,11 @@ class BaseScheduler(six.with_metaclass(ABCMeta)): self.state = STATE_STOPPED # Shut down all executors - with self._executors_lock: + with self._executors_lock, self._jobstores_lock: for executor in six.itervalues(self._executors): executor.shutdown(wait) - # Shut down all job stores - with self._jobstores_lock: + # Shut down all job stores for jobstore in six.itervalues(self._jobstores): jobstore.shutdown() diff --git a/lib/apscheduler/schedulers/qt.py b/lib/apscheduler/schedulers/qt.py index dda77d79..600f6e67 100644 --- a/lib/apscheduler/schedulers/qt.py +++ b/lib/apscheduler/schedulers/qt.py @@ -9,13 +9,16 @@ except (ImportError, RuntimeError): # pragma: nocover from PyQt4.QtCore import QObject, QTimer except ImportError: try: - from PySide2.QtCore import QObject, QTimer # noqa + from PySide6.QtCore import QObject, QTimer # noqa except ImportError: try: - from PySide.QtCore import QObject, QTimer # noqa + from PySide2.QtCore import QObject, QTimer # noqa except ImportError: - raise ImportError('QtScheduler requires either PyQt5, PyQt4, PySide2 ' - 'or PySide installed') + try: + from PySide.QtCore import QObject, QTimer # noqa + except ImportError: + raise ImportError('QtScheduler requires either PyQt5, PyQt4, PySide6, PySide2 ' + 'or PySide installed') class QtScheduler(BaseScheduler): diff --git a/lib/apscheduler/triggers/cron/__init__.py b/lib/apscheduler/triggers/cron/__init__.py index fec6e3b5..b5389dd2 100644 --- a/lib/apscheduler/triggers/cron/__init__.py +++ b/lib/apscheduler/triggers/cron/__init__.py @@ -6,7 +6,8 @@ import six from apscheduler.triggers.base import BaseTrigger from apscheduler.triggers.cron.fields import ( BaseField, MonthField, WeekField, DayOfMonthField, DayOfWeekField, DEFAULT_VALUES) -from apscheduler.util import datetime_ceil, convert_to_datetime, datetime_repr, astimezone +from apscheduler.util import ( + datetime_ceil, convert_to_datetime, datetime_repr, astimezone, localize, normalize) class CronTrigger(BaseTrigger): @@ -143,7 +144,7 @@ class CronTrigger(BaseTrigger): i += 1 difference = datetime(**values) - dateval.replace(tzinfo=None) - return self.timezone.normalize(dateval + difference), fieldnum + return normalize(dateval + difference), fieldnum def _set_field_value(self, dateval, fieldnum, new_value): values = {} @@ -156,7 +157,7 @@ class CronTrigger(BaseTrigger): else: values[field.name] = new_value - return self.timezone.localize(datetime(**values)) + return localize(datetime(**values), self.timezone) def get_next_fire_time(self, previous_fire_time, now): if previous_fire_time: diff --git a/lib/apscheduler/triggers/interval.py b/lib/apscheduler/triggers/interval.py index 61094aa1..b0e2dbdd 100644 --- a/lib/apscheduler/triggers/interval.py +++ b/lib/apscheduler/triggers/interval.py @@ -4,7 +4,9 @@ from math import ceil from tzlocal import get_localzone from apscheduler.triggers.base import BaseTrigger -from apscheduler.util import convert_to_datetime, timedelta_seconds, datetime_repr, astimezone +from apscheduler.util import ( + convert_to_datetime, normalize, timedelta_seconds, datetime_repr, + 
astimezone) class IntervalTrigger(BaseTrigger): @@ -63,7 +65,7 @@ class IntervalTrigger(BaseTrigger): next_fire_time = self._apply_jitter(next_fire_time, self.jitter, now) if not self.end_date or next_fire_time <= self.end_date: - return self.timezone.normalize(next_fire_time) + return normalize(next_fire_time) def __getstate__(self): return { diff --git a/lib/apscheduler/util.py b/lib/apscheduler/util.py index 1e643bff..d929a482 100644 --- a/lib/apscheduler/util.py +++ b/lib/apscheduler/util.py @@ -34,7 +34,7 @@ except ImportError: __all__ = ('asint', 'asbool', 'astimezone', 'convert_to_datetime', 'datetime_to_utc_timestamp', 'utc_timestamp_to_datetime', 'timedelta_seconds', 'datetime_ceil', 'get_callable_name', 'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'repr_escape', 'check_callable_args', - 'TIMEOUT_MAX') + 'normalize', 'localize', 'TIMEOUT_MAX') class _Undefined(object): @@ -90,9 +90,7 @@ def astimezone(obj): if isinstance(obj, six.string_types): return timezone(obj) if isinstance(obj, tzinfo): - if not hasattr(obj, 'localize') or not hasattr(obj, 'normalize'): - raise TypeError('Only timezones from the pytz library are supported') - if obj.zone == 'local': + if obj.tzname(None) == 'local': raise ValueError( 'Unable to determine the name of the local timezone -- you must explicitly ' 'specify the name of the local timezone. Please refrain from using timezones like ' @@ -162,11 +160,7 @@ def convert_to_datetime(input, tz, arg_name): if isinstance(tz, six.string_types): tz = timezone(tz) - try: - return tz.localize(datetime_, is_dst=None) - except AttributeError: - raise TypeError( - 'Only pytz timezones are supported (need the localize() and normalize() methods)') + return localize(datetime_, tz) def datetime_to_utc_timestamp(timeval): @@ -431,3 +425,14 @@ def iscoroutinefunction_partial(f): # The asyncio version of iscoroutinefunction includes testing for @coroutine # decorations vs. the inspect version which does not. return iscoroutinefunction(f) + + +def normalize(dt): + return datetime.fromtimestamp(dt.timestamp(), dt.tzinfo) + + +def localize(dt, tzinfo): + if hasattr(tzinfo, 'localize'): + return tzinfo.localize(dt) + + return normalize(dt.replace(tzinfo=tzinfo)) diff --git a/lib/pytz/__init__.py b/lib/pytz/__init__.py index 6ef4366b..900e8caa 100644 --- a/lib/pytz/__init__.py +++ b/lib/pytz/__init__.py @@ -22,8 +22,8 @@ from pytz.tzfile import build_tzinfo # The IANA (nee Olson) database is updated several times a year. -OLSON_VERSION = '2021c' -VERSION = '2021.3' # pip compatible version number. +OLSON_VERSION = '2022a' +VERSION = '2022.1' # pip compatible version number. 
__version__ = VERSION OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling diff --git a/lib/pytz/zoneinfo/America/Punta_Arenas b/lib/pytz/zoneinfo/America/Punta_Arenas index a5a8af52c2f26baf6f85a1786f69593491ad5195..13bd1d9a7755df59c009b4c4e5fbe44257e7ab51 100644 GIT binary patch delta 88 zcmaFI_l|FZv7qbhn5O~rYaSh4$WVA-LtOdMjV@M9jJqe-GqozZ{s#h(JctIV0ns3J ZAR450vnum9X0T!}4lWyA17ka5E&yJ$E=d3Y delta 88 zcmaFI_l|FZv7p=Rn5O~rYaSh2$WVA-LtOc>jV@M9jJqb+GqozZ{RaY&JctIV0ns3J ZAR450vnum9X0T!}4lWyA17ka5E&yM&E=>Rc diff --git a/lib/pytz/zoneinfo/America/Santiago b/lib/pytz/zoneinfo/America/Santiago index 816a0428188d99f437004312ee73c3860ee0f54f..aa2906063f3bb2be42e800c208c6a4453a610031 100644 GIT binary patch delta 99 zcmaDT{7`s;v7qbhn5O~rYaSh4$WVA-LtOdMjV|2G?7JBl{{P>*dvX|at>)4HK+tdC f?h2woszEeJ2Z#pg0?{CyoBuEuv%jV|2G?7J8k{{P>*YjPNKt>&@+K+tdC f?gpYkszEeJ2Z#pg0?{CyoBuEuv%I^7F@!jYzv~j#8ix+rNO0!h}K*VS>4{WcTsP3hlm2(T5^^^aFHDP z16+a@mxh{7TSG(;In+?U=bq)WexK6`cfyT8m*iIrpEPX;7Rk5 zS;>FiYauiyg?2@Y?;$BQTy-BmuZvcY$P4YM_z!u e0eOX?$KoB$wA~lzpGrjLDuJAsR_Z8kR8|u`xZq=OLoTaguEX0P`grt*= zL~LS24BpKoCTD3RzUO?)?;HQm>&N`qZMeZJXLWIWRF>xFG~L}L%VWdpG$%FF*{+$d z4qd7Hy885|YaegAe)XvvkH4~c_@LR#SJ`qdbh~;dJG1w)n?94h#JS`qE3)5KmHfb| z=6@V$vGqyVWvO3)*R4@pQCl7{3ViAZLkNkwu4O)`>=q$Byr36L`& cr!X{yI7clNiH2M4k^Y&X!MGi_6LzxYA58?5rT_o{ diff --git a/lib/pytz/zoneinfo/Asia/Hebron b/lib/pytz/zoneinfo/Asia/Hebron index 1a206a70c7da285a55cf3790e116e633fbd0a14a..0078bf01c3db4a013a599ca995fbb72b81460a47 100644 GIT binary patch delta 430 zcmW;Ize@sf7{~GBFvyUoZVRHl#8ix+rJ<#Uh}Ncutgc^LS&4et4G{(2q)W~c1T9iS ze}GHS;?mOev_wNh5G_H}_kGXuT0ifn?$n)j)b_m*S(ynmyZk4sqj_DMev$Rwnz+Lq zao=*X@pC1c4~w$(d@kG9DcR|?G= zEDSX(k)_C5WHGWDS&pnn3Xlr)NikaFAW29Tl7{3ViJ>MFNewl*NHUU*q$Byr3y?P$ ddMsYi2<+cRAv)AQm5wi@l8H4oYs7NJ9)aE0xHA0J=K||U_#3K9wBu}VQpGDCe+B!>Ps-<0sO~oc8 zCL4*^#7JWBOiW^8ktW{jyvy$$@9U|AE8**qQNTRRFL{fcCE|QF7Bk4#!asuQG dhQ*dvX|at>)4HK+tdC f?h2woszEeJ2Z#pg0?{CyoBuEuv%jV|2G?7J8k{{P>*YjPNKt>&@+K+tdC f?gpYkszEeJ2Z#pg0?{CyoBuEuv%a6({$xFA0R0|N+yKqV0KPt>SXzRJ;{b>wG*_Lf%-I!kUh=uSD)px3guLBC{O zgF(vt2E*9X3>%MRFvfE8z#uOl)#2m@b{8=j1qKcV1}y_dSq26Kd-6GUeHKPW28PLO z%qo~(m~74b(HZCmke@*Q0MQ`7fM}3^Ks3ltAR6Q^5DoGhhz9u&M1%aeIiF<<(;)EV HUv_N(G=W0U delta 396 zcmX>hutH#hxF9bB0|N+yKqV0KPSmJWzVaqP>&W>4?JegLbe3!k(4Dd&L9by_b7}xS#+70|N+yfHx2eOw=e9zRJ;{b>wG*_Lf%-I!kUh=uSD)Fmc0CU2ZOJ zE_P;iW=;+sUP6#>GB>L$i;MyT$7DZN7j{M&W>4?JegLbe3!k(4Dd&Vd93Px=dV5 zTfdPa;paO__CTf%_UwMw`2_Lg%AI!iVN=uX*?pw}`vK)+YOD7-SR}L?`cIS7&BmWSsnuNg310 zlZ}{PIs@Gd@&m}-AR6R$5Djuahz9uqM1%YRqCtKE(IEeTXpo;a$FXc>8URndVAlcw DC`vv8 diff --git a/lib/pytz/zoneinfo/Europe/Zaporozhye b/lib/pytz/zoneinfo/Europe/Zaporozhye index e42edfc8506b9b99362b36d90c8b8c4db67d50d8..f0406c12a6b519347d6a7c091424da9d0f241236 100644 GIT binary patch delta 408 zcmdlba7$oSYzRJ;{b>wG*_Lf%-I!kUh=uSD)px3guLBC{O zgF(vt2E*9X3>!~mF~)H7Kp`(5)#2m`?0!593^ED~Tnr3a28{BP@39-QFfuYQOy*@) z!F2azC+45dK!<}o0&+Zv26+HPgFFGEK^_6oAkTnkkcU7t$WtI1 F+5qwkK=c3r delta 372 zcmca5uuEWqxF9bB0|N+yKot=4PSmJXzVaqP>&W>4?JegLbe3!k(4Dd&L9bR delta 57 zcmbQqv6W+jGNZ#pl^XWM{v!_(A6=d}|Ip;?%!-p67=hxGmoR?e21zgg0Z?-DWF|L8 HptKVJt~D1- diff --git a/lib/pytz/zoneinfo/leapseconds b/lib/pytz/zoneinfo/leapseconds index 834b96ea..ffa5eb80 100644 --- a/lib/pytz/zoneinfo/leapseconds +++ b/lib/pytz/zoneinfo/leapseconds @@ -72,11 +72,11 @@ Leap 2016 Dec 31 23:59:60 + S # Any additional leap seconds will come after this. # This Expires line is commented out for now, # so that pre-2020a zic implementations do not reject this file. 
-#Expires 2022 Jun 28 00:00:00 +#Expires 2022 Dec 28 00:00:00 # POSIX timestamps for the data in this file: #updated 1467936000 (2016-07-08 00:00:00 UTC) -#expires 1656374400 (2022-06-28 00:00:00 UTC) +#expires 1672185600 (2022-12-28 00:00:00 UTC) -# Updated through IERS Bulletin C62 -# File expires on: 28 June 2022 +# Updated through IERS Bulletin C63 +# File expires on: 28 December 2022 diff --git a/lib/pytz/zoneinfo/tzdata.zi b/lib/pytz/zoneinfo/tzdata.zi index e16ab09f..e21fc920 100644 --- a/lib/pytz/zoneinfo/tzdata.zi +++ b/lib/pytz/zoneinfo/tzdata.zi @@ -1111,8 +1111,10 @@ R P 2016 2018 - Mar Sa>=24 1 1 S R P 2016 2018 - O Sa>=24 1 0 - R P 2019 o - Mar 29 0 1 S R P 2019 o - O Sa>=24 0 0 - -R P 2020 ma - Mar Sa>=24 0 1 S -R P 2020 ma - O Sa>=24 1 0 - +R P 2020 2021 - Mar Sa>=24 0 1 S +R P 2020 o - O 24 1 0 - +R P 2021 ma - O F>=23 1 0 - +R P 2022 ma - Mar Su>=25 0 1 S Z Asia/Gaza 2:17:52 - LMT 1900 O 2 Z EET/EEST 1948 May 15 2 K EE%sT 1967 Jun 5 @@ -1418,10 +1420,11 @@ R FJ 2011 o - Mar Su>=1 3 0 - R FJ 2012 2013 - Ja Su>=18 3 0 - R FJ 2014 o - Ja Su>=18 2 0 - R FJ 2014 2018 - N Su>=1 2 1 - -R FJ 2015 ma - Ja Su>=12 3 0 - +R FJ 2015 2021 - Ja Su>=12 3 0 - R FJ 2019 o - N Su>=8 2 1 - R FJ 2020 o - D 20 2 1 - -R FJ 2021 ma - N Su>=8 2 1 - +R FJ 2022 ma - N Su>=8 2 1 - +R FJ 2023 ma - Ja Su>=12 3 0 - Z Pacific/Fiji 11:55:44 - LMT 1915 O 26 12 FJ +12/+13 Z Pacific/Gambier -8:59:48 - LMT 1912 O @@ -2429,8 +2432,8 @@ Z Europe/Simferopol 2:16:24 - LMT 1880 1 c CE%sT 1944 Ap 13 3 R MSK/MSD 1990 3 - MSK 1990 Jul 1 2 -2 - EET 1992 -2 e EE%sT 1994 May +2 - EET 1992 Mar 20 +2 c EE%sT 1994 May 3 e MSK/MSD 1996 Mar 31 0s 3 1 MSD 1996 O 27 3s 3 R MSK/MSD 1997 @@ -2785,7 +2788,7 @@ Z Europe/Kiev 2:2:4 - LMT 1880 1 c CE%sT 1943 N 6 3 R MSK/MSD 1990 Jul 1 2 2 1 EEST 1991 S 29 3 -2 e EE%sT 1995 +2 c EE%sT 1996 May 13 2 E EE%sT Z Europe/Uzhgorod 1:29:12 - LMT 1890 O 1 - CET 1940 @@ -2795,8 +2798,8 @@ Z Europe/Uzhgorod 1:29:12 - LMT 1890 O 3 R MSK/MSD 1990 3 - MSK 1990 Jul 1 2 1 - CET 1991 Mar 31 3 -2 - EET 1992 -2 e EE%sT 1995 +2 - EET 1992 Mar 20 +2 c EE%sT 1996 May 13 2 E EE%sT Z Europe/Zaporozhye 2:20:40 - LMT 1880 2:20 - +0220 1924 May 2 @@ -2804,7 +2807,8 @@ Z Europe/Zaporozhye 2:20:40 - LMT 1880 3 - MSK 1941 Au 25 1 c CE%sT 1943 O 25 3 R MSK/MSD 1991 Mar 31 2 -2 e EE%sT 1995 +2 e EE%sT 1992 Mar 20 +2 c EE%sT 1996 May 13 2 E EE%sT R u 1918 1919 - Mar lastSu 2 1 D R u 1918 1919 - O lastSu 2 0 S @@ -4086,12 +4090,12 @@ R x 2016 2018 - May Su>=9 3u 0 - R x 2016 2018 - Au Su>=9 4u 1 - R x 2019 ma - Ap Su>=2 3u 0 - R x 2019 ma - S Su>=2 4u 1 - -Z America/Santiago -4:42:46 - LMT 1890 --4:42:46 - SMT 1910 Ja 10 +Z America/Santiago -4:42:45 - LMT 1890 +-4:42:45 - SMT 1910 Ja 10 -5 - -05 1916 Jul --4:42:46 - SMT 1918 S 10 +-4:42:45 - SMT 1918 S 10 -4 - -04 1919 Jul --4:42:46 - SMT 1927 S +-4:42:45 - SMT 1927 S -5 x -05/-04 1932 S -4 - -04 1942 Jun -5 - -05 1942 Au @@ -4101,11 +4105,11 @@ Z America/Santiago -4:42:46 - LMT 1890 -5 - -05 1947 May 21 23 -4 x -04/-03 Z America/Punta_Arenas -4:43:40 - LMT 1890 --4:42:46 - SMT 1910 Ja 10 +-4:42:45 - SMT 1910 Ja 10 -5 - -05 1916 Jul --4:42:46 - SMT 1918 S 10 +-4:42:45 - SMT 1918 S 10 -4 - -04 1919 Jul --4:42:46 - SMT 1927 S +-4:42:45 - SMT 1927 S -5 x -05/-04 1932 S -4 - -04 1942 Jun -5 - -05 1942 Au diff --git a/lib/pytz_deprecation_shim/__init__.py b/lib/pytz_deprecation_shim/__init__.py new file mode 100644 index 00000000..8b451620 --- /dev/null +++ b/lib/pytz_deprecation_shim/__init__.py @@ -0,0 +1,34 @@ +__all__ = [ + "AmbiguousTimeError", + 
"NonExistentTimeError", + "InvalidTimeError", + "UnknownTimeZoneError", + "PytzUsageWarning", + "FixedOffset", + "UTC", + "utc", + "build_tzinfo", + "timezone", + "fixed_offset_timezone", + "wrap_zone", +] + +from . import helpers +from ._exceptions import ( + AmbiguousTimeError, + InvalidTimeError, + NonExistentTimeError, + PytzUsageWarning, + UnknownTimeZoneError, +) +from ._impl import ( + UTC, + build_tzinfo, + fixed_offset_timezone, + timezone, + wrap_zone, +) + +# Compatibility aliases +utc = UTC +FixedOffset = fixed_offset_timezone diff --git a/lib/pytz_deprecation_shim/_common.py b/lib/pytz_deprecation_shim/_common.py new file mode 100644 index 00000000..ace322e9 --- /dev/null +++ b/lib/pytz_deprecation_shim/_common.py @@ -0,0 +1,13 @@ +import sys + +_PYTZ_IMPORTED = False + + +def pytz_imported(): + """Detects whether or not pytz has been imported without importing pytz.""" + global _PYTZ_IMPORTED + + if not _PYTZ_IMPORTED and "pytz" in sys.modules: + _PYTZ_IMPORTED = True + + return _PYTZ_IMPORTED diff --git a/lib/pytz_deprecation_shim/_compat.py b/lib/pytz_deprecation_shim/_compat.py new file mode 100644 index 00000000..5b734592 --- /dev/null +++ b/lib/pytz_deprecation_shim/_compat.py @@ -0,0 +1,15 @@ +import sys + +if sys.version_info[0] == 2: + from . import _compat_py2 as _compat_impl +else: + from . import _compat_py3 as _compat_impl + +UTC = _compat_impl.UTC +get_timezone = _compat_impl.get_timezone +get_timezone_file = _compat_impl.get_timezone_file +get_fixed_offset_zone = _compat_impl.get_fixed_offset_zone +is_ambiguous = _compat_impl.is_ambiguous +is_imaginary = _compat_impl.is_imaginary +enfold = _compat_impl.enfold +get_fold = _compat_impl.get_fold diff --git a/lib/pytz_deprecation_shim/_compat_py2.py b/lib/pytz_deprecation_shim/_compat_py2.py new file mode 100644 index 00000000..f473d267 --- /dev/null +++ b/lib/pytz_deprecation_shim/_compat_py2.py @@ -0,0 +1,43 @@ +from datetime import timedelta + +from dateutil import tz + +UTC = tz.UTC + + +def get_timezone(key): + if not key: + raise KeyError("Unknown time zone: %s" % key) + + try: + rv = tz.gettz(key) + except Exception: + rv = None + + if rv is None or not isinstance(rv, (tz.tzutc, tz.tzfile)): + raise KeyError("Unknown time zone: %s" % key) + + return rv + + +def get_timezone_file(f, key=None): + return tz.tzfile(f) + + +def get_fixed_offset_zone(offset): + return tz.tzoffset(None, timedelta(minutes=offset)) + + +def is_ambiguous(dt): + return tz.datetime_ambiguous(dt) + + +def is_imaginary(dt): + return not tz.datetime_exists(dt) + + +enfold = tz.enfold + + +def get_fold(dt): + return getattr(dt, "fold", 0) diff --git a/lib/pytz_deprecation_shim/_compat_py3.py b/lib/pytz_deprecation_shim/_compat_py3.py new file mode 100644 index 00000000..8881abac --- /dev/null +++ b/lib/pytz_deprecation_shim/_compat_py3.py @@ -0,0 +1,58 @@ +# Note: This file could use Python 3-only syntax, but at the moment this breaks +# the coverage job on Python 2. Until we make it so that coverage can ignore +# this file only on Python 2, we'll have to stick to 2/3-compatible syntax. 
+try: + import zoneinfo +except ImportError: + from backports import zoneinfo + +import datetime + +UTC = datetime.timezone.utc + + +def get_timezone(key): + try: + return zoneinfo.ZoneInfo(key) + except (ValueError, OSError): + # TODO: Use `from e` when this file can use Python 3 syntax + raise KeyError(key) + + +def get_timezone_file(f, key=None): + return zoneinfo.ZoneInfo.from_file(f, key=key) + + +def get_fixed_offset_zone(offset): + return datetime.timezone(datetime.timedelta(minutes=offset)) + + +def is_imaginary(dt): + dt_rt = dt.astimezone(UTC).astimezone(dt.tzinfo) + + return not (dt == dt_rt) + + +def is_ambiguous(dt): + if is_imaginary(dt): + return False + + wall_0 = dt + wall_1 = dt.replace(fold=not dt.fold) + + # Ambiguous datetimes can only exist if the offset changes, so we don't + # actually have to check whether dst() or tzname() are different. + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + + return not same_offset + + +def enfold(dt, fold=1): + if dt.fold != fold: + return dt.replace(fold=fold) + else: + return dt + + +def get_fold(dt): + return dt.fold diff --git a/lib/pytz_deprecation_shim/_exceptions.py b/lib/pytz_deprecation_shim/_exceptions.py new file mode 100644 index 00000000..58d7af0a --- /dev/null +++ b/lib/pytz_deprecation_shim/_exceptions.py @@ -0,0 +1,75 @@ +from ._common import pytz_imported + + +class PytzUsageWarning(RuntimeWarning): + """Warning raised when accessing features specific to ``pytz``'s interface. + + This warning is used to direct users of ``pytz``-specific features like the + ``localize`` and ``normalize`` methods towards using the standard + ``tzinfo`` interface, so that these shims can be replaced with one of the + underlying libraries they are wrapping. + """ + + +class UnknownTimeZoneError(KeyError): + """Raised when no time zone is found for a specified key.""" + + +class InvalidTimeError(Exception): + """The base class for exceptions related to folds and gaps.""" + + +class AmbiguousTimeError(InvalidTimeError): + """Exception raised when ``is_dst=None`` for an ambiguous time (fold).""" + + +class NonExistentTimeError(InvalidTimeError): + """Exception raised when ``is_dst=None`` for a non-existent time (gap).""" + + +PYTZ_BASE_ERROR_MAPPING = {} + + +def _make_pytz_derived_errors( + InvalidTimeError_=InvalidTimeError, + AmbiguousTimeError_=AmbiguousTimeError, + NonExistentTimeError_=NonExistentTimeError, + UnknownTimeZoneError_=UnknownTimeZoneError, +): + if PYTZ_BASE_ERROR_MAPPING or not pytz_imported(): + return + + import pytz + + class InvalidTimeError(InvalidTimeError_, pytz.InvalidTimeError): + pass + + class AmbiguousTimeError(AmbiguousTimeError_, pytz.AmbiguousTimeError): + pass + + class NonExistentTimeError( + NonExistentTimeError_, pytz.NonExistentTimeError + ): + pass + + class UnknownTimeZoneError( + UnknownTimeZoneError_, pytz.UnknownTimeZoneError + ): + pass + + PYTZ_BASE_ERROR_MAPPING.update( + { + InvalidTimeError_: InvalidTimeError, + AmbiguousTimeError_: AmbiguousTimeError, + NonExistentTimeError_: NonExistentTimeError, + UnknownTimeZoneError_: UnknownTimeZoneError, + } + ) + + +def get_exception(exc_type, msg): + _make_pytz_derived_errors() + + out_exc_type = PYTZ_BASE_ERROR_MAPPING.get(exc_type, exc_type) + + return out_exc_type(msg) diff --git a/lib/pytz_deprecation_shim/_impl.py b/lib/pytz_deprecation_shim/_impl.py new file mode 100644 index 00000000..54430479 --- /dev/null +++ b/lib/pytz_deprecation_shim/_impl.py @@ -0,0 +1,296 @@ +# -*- coding: utf-8 -*- +import warnings +from datetime import 
tzinfo + +from . import _compat +from ._exceptions import ( + AmbiguousTimeError, + NonExistentTimeError, + PytzUsageWarning, + UnknownTimeZoneError, + get_exception, +) + +IS_DST_SENTINEL = object() +KEY_SENTINEL = object() + + +def timezone(key, _cache={}): + """Builds an IANA database time zone shim. + + This is the equivalent of ``pytz.timezone``. + + :param key: + A valid key from the IANA time zone database. + + :raises UnknownTimeZoneError: + If an unknown value is passed, this will raise an exception that can be + caught by :exc:`pytz_deprecation_shim.UnknownTimeZoneError` or + ``pytz.UnknownTimeZoneError``. Like + :exc:`zoneinfo.ZoneInfoNotFoundError`, both of those are subclasses of + :exc:`KeyError`. + """ + instance = _cache.get(key, None) + if instance is None: + if len(key) == 3 and key.lower() == "utc": + instance = _cache.setdefault(key, UTC) + else: + try: + zone = _compat.get_timezone(key) + except KeyError: + raise get_exception(UnknownTimeZoneError, key) + instance = _cache.setdefault(key, wrap_zone(zone, key=key)) + + return instance + + +def fixed_offset_timezone(offset, _cache={}): + """Builds a fixed offset time zone shim. + + This is the equivalent of ``pytz.FixedOffset``. An alias is available as + ``pytz_deprecation_shim.FixedOffset`` as well. + + :param offset: + A fixed offset from UTC, in minutes. This must be in the range ``-1439 + <= offset <= 1439``. + + :raises ValueError: + For offsets whose absolute value is greater than or equal to 24 hours. + + :return: + A shim time zone. + """ + if not (-1440 < offset < 1440): + raise ValueError("absolute offset is too large", offset) + + instance = _cache.get(offset, None) + if instance is None: + if offset == 0: + instance = _cache.setdefault(offset, UTC) + else: + zone = _compat.get_fixed_offset_zone(offset) + instance = _cache.setdefault(offset, wrap_zone(zone, key=None)) + + return instance + + +def build_tzinfo(zone, fp): + """Builds a shim object from a TZif file. + + This is a shim for ``pytz.build_tzinfo``. Given a value to use as the zone + IANA key and a file-like object containing a valid TZif file (i.e. + conforming to :rfc:`8536`), this builds a time zone object and wraps it in + a shim class. + + The argument names are chosen to match those in ``pytz.build_tzinfo``. + + :param zone: + A string to be used as the time zone object's IANA key. + + :param fp: + A readable file-like object emitting bytes, pointing to a valid TZif + file. + + :return: + A shim time zone. + """ + zone_file = _compat.get_timezone_file(fp) + + return wrap_zone(zone_file, key=zone) + + +def wrap_zone(tz, key=KEY_SENTINEL, _cache={}): + """Wrap an existing time zone object in a shim class. + + This is likely to be useful if you would like to work internally with + non-``pytz`` zones, but you expose an interface to callers relying on + ``pytz``'s interface. It may also be useful for passing non-``pytz`` zones + to libraries expecting to use ``pytz``'s interface. + + :param tz: + A :pep:`495`-compatible time zone, such as those provided by + :mod:`dateutil.tz` or :mod:`zoneinfo`. + + :param key: + The value for the IANA time zone key. This is optional for ``zoneinfo`` + zones, but required for ``dateutil.tz`` zones. + + :return: + A shim time zone. + """ + if key is KEY_SENTINEL: + key = getattr(tz, "key", KEY_SENTINEL) + + if key is KEY_SENTINEL: + raise TypeError( + "The `key` argument is required when wrapping zones that do not " + + "have a `key` attribute." 
+    )
+
+    instance = _cache.get((id(tz), key), None)
+    if instance is None:
+        instance = _cache.setdefault((id(tz), key), _PytzShimTimezone(tz, key))
+
+    return instance
+
+
+class _PytzShimTimezone(tzinfo):
+    # Add instance variables for _zone and _key because this will make error
+    # reporting with partially-initialized _BasePytzShimTimezone objects
+    # work better.
+    _zone = None
+    _key = None
+
+    def __init__(self, zone, key):
+        self._key = key
+        self._zone = zone
+
+    def utcoffset(self, dt):
+        return self._zone.utcoffset(dt)
+
+    def dst(self, dt):
+        return self._zone.dst(dt)
+
+    def tzname(self, dt):
+        return self._zone.tzname(dt)
+
+    def fromutc(self, dt):
+        # The default fromutc implementation only works if tzinfo is "self"
+        dt_base = dt.replace(tzinfo=self._zone)
+        dt_out = self._zone.fromutc(dt_base)
+
+        return dt_out.replace(tzinfo=self)
+
+    def __str__(self):
+        if self._key is not None:
+            return str(self._key)
+        else:
+            return repr(self)
+
+    def __repr__(self):
+        return "%s(%s, %s)" % (
+            self.__class__.__name__,
+            repr(self._zone),
+            repr(self._key),
+        )
+
+    def unwrap_shim(self):
+        """Returns the underlying class that the shim is a wrapper for.
+
+        This is a shim-specific method equivalent to
+        :func:`pytz_deprecation_shim.helpers.upgrade_tzinfo`. It is provided as
+        a method to allow end-users to upgrade shim timezones without requiring
+        an explicit dependency on ``pytz_deprecation_shim``, e.g.:
+
+        .. code-block:: python
+
+            if getattr(tz, "unwrap_shim", None) is not None:
+                tz = tz.unwrap_shim()
+        """
+        return self._zone
+
+    @property
+    def zone(self):
+        warnings.warn(
+            "The zone attribute is specific to pytz's interface; "
+            + "please migrate to a new time zone provider. "
+            + "For more details on how to do so, see %s"
+            % PYTZ_MIGRATION_GUIDE_URL,
+            PytzUsageWarning,
+            stacklevel=2,
+        )
+
+        return self._key
+
+    def localize(self, dt, is_dst=IS_DST_SENTINEL):
+        warnings.warn(
+            "The localize method is no longer necessary, as this "
+            + "time zone supports the fold attribute (PEP 495). "
+            + "For more details on migrating to a PEP 495-compliant "
+            + "implementation, see %s" % PYTZ_MIGRATION_GUIDE_URL,
+            PytzUsageWarning,
+            stacklevel=2,
+        )
+
+        if dt.tzinfo is not None:
+            raise ValueError("Not naive datetime (tzinfo is already set)")
+
+        dt_out = dt.replace(tzinfo=self)
+
+        if is_dst is IS_DST_SENTINEL:
+            return dt_out
+
+        dt_ambiguous = _compat.is_ambiguous(dt_out)
+        dt_imaginary = (
+            _compat.is_imaginary(dt_out) if not dt_ambiguous else False
+        )
+
+        if is_dst is None:
+            if dt_imaginary:
+                raise get_exception(
+                    NonExistentTimeError, dt.replace(tzinfo=None)
+                )
+
+            if dt_ambiguous:
+                raise get_exception(AmbiguousTimeError, dt.replace(tzinfo=None))
+
+        elif dt_ambiguous or dt_imaginary:
+            # Start by normalizing the folds; dt_out may have fold=0 or fold=1,
+            # but we need to know the DST offset on both sides anyway, so we
+            # will get one datetime representing each side of the fold, then
+            # decide which one we're going to return.
+            if _compat.get_fold(dt_out):
+                dt_enfolded = dt_out
+                dt_out = _compat.enfold(dt_out, fold=0)
+            else:
+                dt_enfolded = _compat.enfold(dt_out, fold=1)
+
+            # Now we want to decide whether the fold=0 or fold=1 represents
+            # what pytz would return for `is_dst=True`
+            enfolded_dst = bool(dt_enfolded.dst())
+            if bool(dt_out.dst()) == enfolded_dst:
+                # If this is not a transition between standard time and
+                # daylight saving time, pytz will consider the larger offset
+                # the DST offset.
+ enfolded_dst = dt_enfolded.utcoffset() > dt_out.utcoffset() + + # The default we've established is that dt_out is fold=0; swap it + # for the fold=1 datetime if is_dst == True and the enfolded side + # is DST or if is_dst == False and the enfolded side is *not* DST. + if is_dst == enfolded_dst: + dt_out = dt_enfolded + + return dt_out + + def normalize(self, dt): + warnings.warn( + "The normalize method is no longer necessary, as this " + + "time zone supports the fold attribute (PEP 495). " + + "For more details on migrating to a PEP 495-compliant " + + "implementation, see %s" % PYTZ_MIGRATION_GUIDE_URL, + PytzUsageWarning, + stacklevel=2, + ) + + if dt.tzinfo is None: + raise ValueError("Naive time - no tzinfo set") + + if dt.tzinfo is self: + return dt + + return dt.astimezone(self) + + def __copy__(self): + return self + + def __deepcopy__(self, memo=None): + return self + + def __reduce__(self): + return wrap_zone, (self._zone, self._key) + + +UTC = wrap_zone(_compat.UTC, "UTC") +PYTZ_MIGRATION_GUIDE_URL = ( + "https://pytz-deprecation-shim.readthedocs.io/en/latest/migration.html" +) diff --git a/lib/pytz_deprecation_shim/helpers.py b/lib/pytz_deprecation_shim/helpers.py new file mode 100644 index 00000000..6b05b130 --- /dev/null +++ b/lib/pytz_deprecation_shim/helpers.py @@ -0,0 +1,90 @@ +""" +This module contains helper functions to ease the transition from ``pytz`` to +another :pep:`495`-compatible library. +""" +from . import _common, _compat +from ._impl import _PytzShimTimezone + +_PYTZ_BASE_CLASSES = None + + +def is_pytz_zone(tz): + """Check if a time zone is a ``pytz`` time zone. + + This will only import ``pytz`` if it has already been imported, and does + not rely on the existence of the ``localize`` or ``normalize`` methods + (since the shim classes also have these methods, but are not ``pytz`` + zones). + """ + + # If pytz is not in sys.modules, then we will assume the time zone is not a + # pytz zone. It is possible that someone has manipulated sys.modules to + # remove pytz, but that's the kind of thing that causes all kinds of other + # problems anyway, so we'll call that an unsupported configuration. + if not _common.pytz_imported(): + return False + + if _PYTZ_BASE_CLASSES is None: + _populate_pytz_base_classes() + + return isinstance(tz, _PYTZ_BASE_CLASSES) + + +def upgrade_tzinfo(tz): + """Convert a ``pytz`` or shim timezone into its modern equivalent. + + The shim classes are thin wrappers around :mod:`zoneinfo` or + :mod:`dateutil.tz` implementations of the :class:`datetime.tzinfo` base + class. This function removes the shim and returns the underlying "upgraded" + time zone. + + When passed a ``pytz`` zone (not a shim), this returns the non-``pytz`` + equivalent. This may fail if ``pytz`` is using a data source incompatible + with the upgraded provider's data source, or if the ``pytz`` zone was built + from a file rather than an IANA key. + + When passed an object that is not a shim or a ``pytz`` zone, this returns + the original object. + + :param tz: + A :class:`datetime.tzinfo` object. + + :raises KeyError: + If a ``pytz`` zone is passed to the function with no equivalent in the + :pep:`495`-compatible library's version of the Olson database. + + :return: + A :pep:`495`-compatible equivalent of any ``pytz`` or shim + class, or the original object. 
+    """
+    if isinstance(tz, _PytzShimTimezone):
+        return tz._zone
+
+    if is_pytz_zone(tz):
+        if tz.zone is None:
+            # This is a fixed offset zone
+            offset = tz.utcoffset(None)
+            offset_minutes = offset.total_seconds() / 60
+
+            return _compat.get_fixed_offset_zone(offset_minutes)
+
+        if tz.zone == "UTC":
+            return _compat.UTC
+
+        return _compat.get_timezone(tz.zone)
+
+    return tz
+
+
+def _populate_pytz_base_classes():
+    import pytz
+    from pytz.tzinfo import BaseTzInfo
+
+    base_classes = (BaseTzInfo, pytz._FixedOffset)
+
+    # In releases prior to 2018.4, pytz.UTC was not a subclass of BaseTzInfo
+    if not isinstance(pytz.UTC, BaseTzInfo):  # pragma: nocover
+        base_classes = base_classes + (type(pytz.UTC),)
+
+    global _PYTZ_BASE_CLASSES
+    _PYTZ_BASE_CLASSES = base_classes
diff --git a/lib/tzdata/__init__.py b/lib/tzdata/__init__.py
index 07c7b3a2..7e6440bb 100644
--- a/lib/tzdata/__init__.py
+++ b/lib/tzdata/__init__.py
@@ -1,6 +1,6 @@
 # IANA versions like 2020a are not valid PEP 440 identifiers; the recommended
 # way to translate the version is to use YYYY.n where `n` is a 0-based index.
-__version__ = "2021.5"
+__version__ = "2022.1"
 
 # This exposes the original IANA version number.
-IANA_VERSION = "2021e"
+IANA_VERSION = "2022a"
diff --git a/lib/tzdata/zoneinfo/America/Punta_Arenas b/lib/tzdata/zoneinfo/America/Punta_Arenas
index 5c9a20b947f3763da250afdf005bc7f0136ad537..c04210406f7bd23172481db68ae2f7f9dcd8e9d2 100644
GIT binary patch
delta 50
ycmdnVxs!83o}%l2AebHVGyp`;uX%J7L@#70JOH9M#FZc2*#DN9arfj}78?LbULYR;

delta 50
ycmdnVxs!83o}$}-AebHVGyp`;uX%I~L@#70JOH9M#FZc0*#DN9ao6Np78?Lbsvsc%

diff --git a/lib/tzdata/zoneinfo/America/Santiago b/lib/tzdata/zoneinfo/America/Santiago
index 8d6032264b656186dc23df35402b1be0079fd670..cde8dbbf049e1bbeabf3bc757828aa1884e673c6 100644
GIT binary patch
[base85 delta data garbled in extraction (fragments of several zoneinfo deltas run together); binary content omitted]

diff --git a/lib/tzdata/zoneinfo/Europe/Simferopol b/lib/tzdata/zoneinfo/Europe/Simferopol
index 88a6f3bdb4691ace47a6d670899a9d049ef5a123..40d23c029a647297b4901286652d2db5407d32dd 100644
GIT binary patch
delta 48
tcmaFJ_K1*UbD+y>LSQ_eJOPG|hZ2mo;X4?+L{

delta 48
vcmaFJ_Knzy_=Ic(`kgz$O@fRZics&l;

diff --git a/lib/tzdata/zoneinfo/Europe/Uzhgorod b/lib/tzdata/zoneinfo/Europe/Uzhgorod
index a5755685e390bd5ad54402ee61b1319e9a66296e..d4c35914191fa2dd7a3e07105af0d4494681badb 100644
GIT binary patch
[base85 delta data garbled in extraction; binary content omitted]

diff --git a/lib/tzdata/zoneinfo/leapseconds b/lib/tzdata/zoneinfo/leapseconds
index 834b96ea..ffa5eb80 100644
--- a/lib/tzdata/zoneinfo/leapseconds
+++ b/lib/tzdata/zoneinfo/leapseconds
@@ -72,11 +72,11 @@ Leap 2016 Dec 31 23:59:60 + S
 # Any additional leap seconds will come after this.
 # This Expires line is commented out for now,
 # so that pre-2020a zic implementations do not reject this file.
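[Editor's note] `_impl.py` and `helpers.py` above together define the migration path this vendoring enables: pytz-style call sites keep working (with a warning) until they are rewritten, after which the shim can be unwrapped. A hedged sketch of both styles — the `pds` alias and sample values are illustrative, not from the patch:

```python
from datetime import datetime
import pytz_deprecation_shim as pds

# Old style still works, but the shim emits PytzUsageWarning.
tz = pds.timezone("America/Santiago")
dt = tz.localize(datetime(2022, 4, 2, 23, 30))

# New style: upgrade to the underlying PEP 495 zone (zoneinfo on Python 3)
# and rely on fold instead of localize()/normalize().
zone = pds.helpers.upgrade_tzinfo(tz)
dt = datetime(2022, 4, 2, 23, 30, fold=0, tzinfo=zone)
```

Once every call site uses the second form, the shim import can be dropped entirely.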
-#Expires 2022 Jun 28 00:00:00 +#Expires 2022 Dec 28 00:00:00 # POSIX timestamps for the data in this file: #updated 1467936000 (2016-07-08 00:00:00 UTC) -#expires 1656374400 (2022-06-28 00:00:00 UTC) +#expires 1672185600 (2022-12-28 00:00:00 UTC) -# Updated through IERS Bulletin C62 -# File expires on: 28 June 2022 +# Updated through IERS Bulletin C63 +# File expires on: 28 December 2022 diff --git a/lib/tzdata/zoneinfo/tzdata.zi b/lib/tzdata/zoneinfo/tzdata.zi index 1948c725..c38197ea 100644 --- a/lib/tzdata/zoneinfo/tzdata.zi +++ b/lib/tzdata/zoneinfo/tzdata.zi @@ -1,4 +1,4 @@ -# version 2021e +# version 2022a # This zic input file is in the public domain. R d 1916 o - Jun 14 23s 1 S R d 1916 1919 - O Su>=1 23s 0 - @@ -1111,9 +1111,10 @@ R P 2016 2018 - Mar Sa>=24 1 1 S R P 2016 2018 - O Sa>=24 1 0 - R P 2019 o - Mar 29 0 1 S R P 2019 o - O Sa>=24 0 0 - -R P 2020 ma - Mar Sa>=24 0 1 S +R P 2020 2021 - Mar Sa>=24 0 1 S R P 2020 o - O 24 1 0 - -R P 2021 ma - O lastF 1 0 - +R P 2021 ma - O F>=23 1 0 - +R P 2022 ma - Mar Su>=25 0 1 S Z Asia/Gaza 2:17:52 - LMT 1900 O 2 Z EET/EEST 1948 May 15 2 K EE%sT 1967 Jun 5 @@ -2431,8 +2432,8 @@ Z Europe/Simferopol 2:16:24 - LMT 1880 1 c CE%sT 1944 Ap 13 3 R MSK/MSD 1990 3 - MSK 1990 Jul 1 2 -2 - EET 1992 -2 e EE%sT 1994 May +2 - EET 1992 Mar 20 +2 c EE%sT 1994 May 3 e MSK/MSD 1996 Mar 31 0s 3 1 MSD 1996 O 27 3s 3 R MSK/MSD 1997 @@ -2787,7 +2788,7 @@ Z Europe/Kiev 2:2:4 - LMT 1880 1 c CE%sT 1943 N 6 3 R MSK/MSD 1990 Jul 1 2 2 1 EEST 1991 S 29 3 -2 e EE%sT 1995 +2 c EE%sT 1996 May 13 2 E EE%sT Z Europe/Uzhgorod 1:29:12 - LMT 1890 O 1 - CET 1940 @@ -2797,8 +2798,8 @@ Z Europe/Uzhgorod 1:29:12 - LMT 1890 O 3 R MSK/MSD 1990 3 - MSK 1990 Jul 1 2 1 - CET 1991 Mar 31 3 -2 - EET 1992 -2 e EE%sT 1995 +2 - EET 1992 Mar 20 +2 c EE%sT 1996 May 13 2 E EE%sT Z Europe/Zaporozhye 2:20:40 - LMT 1880 2:20 - +0220 1924 May 2 @@ -2806,7 +2807,8 @@ Z Europe/Zaporozhye 2:20:40 - LMT 1880 3 - MSK 1941 Au 25 1 c CE%sT 1943 O 25 3 R MSK/MSD 1991 Mar 31 2 -2 e EE%sT 1995 +2 e EE%sT 1992 Mar 20 +2 c EE%sT 1996 May 13 2 E EE%sT R u 1918 1919 - Mar lastSu 2 1 D R u 1918 1919 - O lastSu 2 0 S @@ -4088,12 +4090,12 @@ R x 2016 2018 - May Su>=9 3u 0 - R x 2016 2018 - Au Su>=9 4u 1 - R x 2019 ma - Ap Su>=2 3u 0 - R x 2019 ma - S Su>=2 4u 1 - -Z America/Santiago -4:42:46 - LMT 1890 --4:42:46 - SMT 1910 Ja 10 +Z America/Santiago -4:42:45 - LMT 1890 +-4:42:45 - SMT 1910 Ja 10 -5 - -05 1916 Jul --4:42:46 - SMT 1918 S 10 +-4:42:45 - SMT 1918 S 10 -4 - -04 1919 Jul --4:42:46 - SMT 1927 S +-4:42:45 - SMT 1927 S -5 x -05/-04 1932 S -4 - -04 1942 Jun -5 - -05 1942 Au @@ -4103,11 +4105,11 @@ Z America/Santiago -4:42:46 - LMT 1890 -5 - -05 1947 May 21 23 -4 x -04/-03 Z America/Punta_Arenas -4:43:40 - LMT 1890 --4:42:46 - SMT 1910 Ja 10 +-4:42:45 - SMT 1910 Ja 10 -5 - -05 1916 Jul --4:42:46 - SMT 1918 S 10 +-4:42:45 - SMT 1918 S 10 -4 - -04 1919 Jul --4:42:46 - SMT 1927 S +-4:42:45 - SMT 1927 S -5 x -05/-04 1932 S -4 - -04 1942 Jun -5 - -05 1942 Au diff --git a/lib/tzlocal/__init__.py b/lib/tzlocal/__init__.py index c8196d66..98ed04fd 100644 --- a/lib/tzlocal/__init__.py +++ b/lib/tzlocal/__init__.py @@ -1,5 +1,13 @@ import sys -if sys.platform == 'win32': - from tzlocal.win32 import get_localzone, reload_localzone + +if sys.platform == "win32": + from tzlocal.win32 import ( + get_localzone, + get_localzone_name, + reload_localzone, + ) # pragma: no cover else: - from tzlocal.unix import get_localzone, reload_localzone + from tzlocal.unix import get_localzone, get_localzone_name, reload_localzone + 
+ +__all__ = ["get_localzone", "get_localzone_name", "reload_localzone"] diff --git a/lib/tzlocal/unix.py b/lib/tzlocal/unix.py index 8574965a..eaf96d92 100644 --- a/lib/tzlocal/unix.py +++ b/lib/tzlocal/unix.py @@ -1,97 +1,75 @@ import os -import pytz import re +import sys import warnings +from datetime import timezone +import pytz_deprecation_shim as pds from tzlocal import utils +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo # pragma: no cover +else: + from backports.zoneinfo import ZoneInfo # pragma: no cover + _cache_tz = None +_cache_tz_name = None -def _tz_from_env(tzenv): - if tzenv[0] == ':': - tzenv = tzenv[1:] - - # TZ specifies a file - if os.path.isabs(tzenv) and os.path.exists(tzenv): - with open(tzenv, 'rb') as tzfile: - return pytz.tzfile.build_tzinfo('local', tzfile) - - # TZ specifies a zoneinfo zone. - try: - tz = pytz.timezone(tzenv) - # That worked, so we return this: - return tz - except pytz.UnknownTimeZoneError: - raise pytz.UnknownTimeZoneError( - "tzlocal() does not support non-zoneinfo timezones like %s. \n" - "Please use a timezone in the form of Continent/City") - - -def _try_tz_from_env(): - tzenv = os.environ.get('TZ') - if tzenv: - try: - return _tz_from_env(tzenv) - except pytz.UnknownTimeZoneError: - pass - - -def _get_localzone(_root='/'): +def _get_localzone_name(_root="/"): """Tries to find the local timezone configuration. - This method prefers finding the timezone name and passing that to pytz, - over passing in the localtime file, as in the later case the zoneinfo - name is unknown. + This method finds the timezone name, if it can, or it returns None. The parameter _root makes the function look for files like /etc/localtime beneath the _root directory. This is primarily used by the tests. In normal usage you call the function without parameters.""" - tzenv = _try_tz_from_env() + # First try the ENV setting. + tzenv = utils._tz_name_from_env() if tzenv: return tzenv # Are we under Termux on Android? - if os.path.exists('/system/bin/getprop'): + if os.path.exists(os.path.join(_root, "system/bin/getprop")): import subprocess - androidtz = subprocess.check_output(['getprop', 'persist.sys.timezone']).strip().decode() - return pytz.timezone(androidtz) + + androidtz = ( + subprocess.check_output(["getprop", "persist.sys.timezone"]) + .strip() + .decode() + ) + return androidtz # Now look for distribution specific configuration files # that contain the timezone name. - for configfile in ('etc/timezone', 'var/db/zoneinfo'): + + # Stick all of them in a dict, to compare later. + found_configs = {} + + for configfile in ("etc/timezone", "var/db/zoneinfo"): tzpath = os.path.join(_root, configfile) try: - with open(tzpath, 'rb') as tzfile: + with open(tzpath, "rt") as tzfile: data = tzfile.read() - # Issue #3 was that /etc/timezone was a zoneinfo file. - # That's a misconfiguration, but we need to handle it gracefully: - if data[:5] == b'TZif2': - continue - - etctz = data.strip().decode() + etctz = data.strip('/ \t\r\n') if not etctz: # Empty file, skip continue - for etctz in data.decode().splitlines(): + for etctz in etctz.splitlines(): # Get rid of host definitions and comments: - if ' ' in etctz: - etctz, dummy = etctz.split(' ', 1) - if '#' in etctz: - etctz, dummy = etctz.split('#', 1) + if " " in etctz: + etctz, dummy = etctz.split(" ", 1) + if "#" in etctz: + etctz, dummy = etctz.split("#", 1) if not etctz: continue - tz = pytz.timezone(etctz.replace(' ', '_')) - if _root == '/': - # We are using a file in etc to name the timezone. 
- # Verify that the timezone specified there is actually used: - utils.assert_tz_offset(tz) - return tz - except IOError: - # File doesn't exist or is a directory + found_configs[tzpath] = etctz.replace(" ", "_") + + except (IOError, UnicodeDecodeError): + # File doesn't exist or is a directory, or it's a binary file. continue # CentOS has a ZONE setting in /etc/sysconfig/clock, @@ -99,14 +77,14 @@ def _get_localzone(_root='/'): # Gentoo has a TIMEZONE setting in /etc/conf.d/clock # We look through these files for a timezone: - zone_re = re.compile(r'\s*ZONE\s*=\s*\"') - timezone_re = re.compile(r'\s*TIMEZONE\s*=\s*\"') - end_re = re.compile('\"') + zone_re = re.compile(r"\s*ZONE\s*=\s*\"") + timezone_re = re.compile(r"\s*TIMEZONE\s*=\s*\"") + end_re = re.compile('"') - for filename in ('etc/sysconfig/clock', 'etc/conf.d/clock'): + for filename in ("etc/sysconfig/clock", "etc/conf.d/clock"): tzpath = os.path.join(_root, filename) try: - with open(tzpath, 'rt') as tzfile: + with open(tzpath, "rt") as tzfile: data = tzfile.readlines() for line in data: @@ -118,48 +96,108 @@ def _get_localzone(_root='/'): if match is not None: # Some setting existed line = line[match.end():] - etctz = line[:end_re.search(line).start()] + etctz = line[: end_re.search(line).start()] # We found a timezone - tz = pytz.timezone(etctz.replace(' ', '_')) - if _root == '/': - # We are using a file in etc to name the timezone. - # Verify that the timezone specified there is actually used: - utils.assert_tz_offset(tz) - return tz + found_configs[tzpath] = etctz.replace(" ", "_") - except IOError: - # File doesn't exist or is a directory + except (IOError, UnicodeDecodeError): + # UnicodeDecode handles when clock is symlink to /etc/localtime continue # systemd distributions use symlinks that include the zone name, # see manpage of localtime(5) and timedatectl(1) - tzpath = os.path.join(_root, 'etc/localtime') + tzpath = os.path.join(_root, "etc/localtime") if os.path.exists(tzpath) and os.path.islink(tzpath): - tzpath = os.path.realpath(tzpath) - start = tzpath.find("/")+1 + etctz = realtzpath = os.path.realpath(tzpath) + start = etctz.find("/") + 1 while start != 0: - tzpath = tzpath[start:] + etctz = etctz[start:] try: - return pytz.timezone(tzpath) - except pytz.UnknownTimeZoneError: + pds.timezone(etctz) + tzinfo = f"{tzpath} is a symlink to" + found_configs[tzinfo] = etctz.replace(" ", "_") + except pds.UnknownTimeZoneError: pass - start = tzpath.find("/")+1 + start = etctz.find("/") + 1 - # No explicit setting existed. Use localtime - for filename in ('etc/localtime', 'usr/local/etc/localtime'): - tzpath = os.path.join(_root, filename) + if len(found_configs) > 0: + # We found some explicit config of some sort! + if len(found_configs) > 1: + # Uh-oh, multiple configs. 
See if they match: + unique_tzs = set() + zoneinfo = os.path.join(_root, "usr", "share", "zoneinfo") + directory_depth = len(zoneinfo.split(os.path.sep)) - if not os.path.exists(tzpath): - continue - with open(tzpath, 'rb') as tzfile: - return pytz.tzfile.build_tzinfo('local', tzfile) + for tzname in found_configs.values(): + # Look them up in /usr/share/zoneinfo, and find what they + # really point to: + path = os.path.realpath(os.path.join(zoneinfo, *tzname.split("/"))) + real_zone_name = "/".join(path.split(os.path.sep)[directory_depth:]) + unique_tzs.add(real_zone_name) + + if len(unique_tzs) != 1: + message = "Multiple conflicting time zone configurations found:\n" + for key, value in found_configs.items(): + message += f"{key}: {value}\n" + message += "Fix the configuration, or set the time zone in a TZ environment variable.\n" + raise utils.ZoneInfoNotFoundError(message) + + # We found exactly one config! Use it. + return list(found_configs.values())[0] + + +def _get_localzone(_root="/"): + """Creates a timezone object from the timezone name. + + If there is no timezone config, it will try to create a file from the + localtime timezone, and if there isn't one, it will default to UTC. + + The parameter _root makes the function look for files like /etc/localtime + beneath the _root directory. This is primarily used by the tests. + In normal usage you call the function without parameters.""" + + # First try the ENV setting. + tzenv = utils._tz_from_env() + if tzenv: + return tzenv + + tzname = _get_localzone_name(_root) + if tzname is None: + # No explicit setting existed. Use localtime + for filename in ("etc/localtime", "usr/local/etc/localtime"): + tzpath = os.path.join(_root, filename) + + if not os.path.exists(tzpath): + continue + with open(tzpath, "rb") as tzfile: + tz = pds.wrap_zone(ZoneInfo.from_file(tzfile, key="local")) + break + else: + warnings.warn("Can not find any timezone configuration, defaulting to UTC.") + tz = timezone.utc + else: + tz = pds.timezone(tzname) + + if _root == "/": + # We are using a file in etc to name the timezone. + # Verify that the timezone specified there is actually used: + utils.assert_tz_offset(tz) + return tz + + +def get_localzone_name(): + """Get the computers configured local timezone name, if any.""" + global _cache_tz_name + if _cache_tz_name is None: + _cache_tz_name = _get_localzone_name() + + return _cache_tz_name - warnings.warn('Can not find any timezone configuration, defaulting to UTC.') - return pytz.utc def get_localzone(): """Get the computers configured local timezone, if any.""" + global _cache_tz if _cache_tz is None: _cache_tz = _get_localzone() @@ -169,6 +207,9 @@ def get_localzone(): def reload_localzone(): """Reload the cached localzone. 
You need to call this if the timezone has changed.""" + global _cache_tz_name global _cache_tz + _cache_tz_name = _get_localzone_name() _cache_tz = _get_localzone() + return _cache_tz diff --git a/lib/tzlocal/utils.py b/lib/tzlocal/utils.py index 5a677990..d1026b32 100644 --- a/lib/tzlocal/utils.py +++ b/lib/tzlocal/utils.py @@ -1,7 +1,24 @@ # -*- coding: utf-8 -*- +import os import time import datetime import calendar +import pytz_deprecation_shim as pds + +try: + import zoneinfo # pragma: no cover +except ImportError: + from backports import zoneinfo # pragma: no cover + +from tzlocal import windows_tz + + +class ZoneInfoNotFoundError(pds.UnknownTimeZoneError, zoneinfo.ZoneInfoNotFoundError): + """An exception derived from both pytz and zoneinfo + + This exception will be trappable both by pytz expecting clients and + zoneinfo expecting clients. + """ def get_system_offset(): @@ -21,9 +38,9 @@ def get_system_offset(): # so we check that the difference is less than one minute, because nobody # has that small DST differences. if abs(offset - time.altzone) < 60: - return -time.altzone + return -time.altzone # pragma: no cover else: - return -time.timezone + return -time.timezone # pragma: no cover def get_tz_offset(tz): @@ -39,8 +56,73 @@ def assert_tz_offset(tz): tz_offset = get_tz_offset(tz) system_offset = get_system_offset() if tz_offset != system_offset: - msg = ('Timezone offset does not match system offset: {0} != {1}. ' - 'Please, check your config files.').format( - tz_offset, system_offset - ) + msg = ( + "Timezone offset does not match system offset: {} != {}. " + "Please, check your config files." + ).format(tz_offset, system_offset) raise ValueError(msg) + + +def _tz_name_from_env(tzenv=None): + if tzenv is None: + tzenv = os.environ.get("TZ") + + if not tzenv: + return None + + if tzenv[0] == ":": + tzenv = tzenv[1:] + + if tzenv in windows_tz.tz_win: + # Yup, it's a timezone + return tzenv + + if os.path.isabs(tzenv) and os.path.exists(tzenv): + # It's a file specification, expand it, if possible + parts = os.path.realpath(tzenv).split(os.sep) + + # Is it a zone info zone? + possible_tz = "/".join(parts[-2:]) + if possible_tz in windows_tz.tz_win: + # Yup, it is + return possible_tz + + # Maybe it's a short one, like UTC? + if parts[-1] in windows_tz.tz_win: + # Indeed + return parts[-1] + + +def _tz_from_env(tzenv=None): + if tzenv is None: + tzenv = os.environ.get("TZ") + + if not tzenv: + return None + + # Some weird format that exists: + if tzenv[0] == ":": + tzenv = tzenv[1:] + + # TZ specifies a file + if os.path.isabs(tzenv) and os.path.exists(tzenv): + # Try to see if we can figure out the name + tzname = _tz_name_from_env(tzenv) + if not tzname: + # Nope, not a standard timezone name, just take the filename + tzname = tzenv.split(os.sep)[-1] + with open(tzenv, "rb") as tzfile: + zone = zoneinfo.ZoneInfo.from_file(tzfile, key=tzname) + return pds.wrap_zone(zone) + + # TZ must specify a zoneinfo zone. + try: + tz = pds.timezone(tzenv) + # That worked, so we return this: + return tz + except pds.UnknownTimeZoneError: + # Nope, it's something like "PST4DST" etc, we can't handle that. + raise ZoneInfoNotFoundError( + "tzlocal() does not support non-zoneinfo timezones like %s. 
\n" + "Please use a timezone in the form of Continent/City" + ) from None diff --git a/lib/tzlocal/win32.py b/lib/tzlocal/win32.py index fcc42a23..720ab2b7 100644 --- a/lib/tzlocal/win32.py +++ b/lib/tzlocal/win32.py @@ -1,32 +1,53 @@ +from datetime import datetime +import pytz_deprecation_shim as pds + try: import _winreg as winreg except ImportError: import winreg -import pytz - from tzlocal.windows_tz import win_tz from tzlocal import utils _cache_tz = None +_cache_tz_name = None def valuestodict(key): """Convert a registry key's values to a dictionary.""" - dict = {} + result = {} size = winreg.QueryInfoKey(key)[1] for i in range(size): data = winreg.EnumValue(key, i) - dict[data[0]] = data[1] - return dict + result[data[0]] = data[1] + return result -def get_localzone_name(): +def _get_dst_info(tz): + # Find the offset for when it doesn't have DST: + dst_offset = std_offset = None + has_dst = False + year = datetime.now().year + for dt in (datetime(year, 1, 1), datetime(year, 6, 1)): + if tz.dst(dt).total_seconds() == 0.0: + # OK, no DST during winter, get this offset + std_offset = tz.utcoffset(dt).total_seconds() + else: + has_dst = True + + return has_dst, std_offset, dst_offset + + +def _get_localzone_name(): # Windows is special. It has unique time zone names (in several # meanings of the word) available, but unfortunately, they can be # translated to the language of the operating system, so we need to # do a backwards lookup, by going through all time zones and see which # one matches. + tzenv = utils._tz_name_from_env() + if tzenv: + return tzenv + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" @@ -34,44 +55,16 @@ def get_localzone_name(): keyvalues = valuestodict(localtz) localtz.Close() - if 'TimeZoneKeyName' in keyvalues: - # Windows 7 (and Vista?) + if "TimeZoneKeyName" in keyvalues: + # Windows 7 and later # For some reason this returns a string with loads of NUL bytes at # least on some systems. I don't know if this is a bug somewhere, I # just work around it. - tzkeyname = keyvalues['TimeZoneKeyName'].split('\x00', 1)[0] + tzkeyname = keyvalues["TimeZoneKeyName"].split("\x00", 1)[0] else: - # Windows 2000 or XP - - # This is the localized name: - tzwin = keyvalues['StandardName'] - - # Open the list of timezones to look up the real name: - TZKEYNAME = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" - tzkey = winreg.OpenKey(handle, TZKEYNAME) - - # Now, match this value to Time Zone information - tzkeyname = None - for i in range(winreg.QueryInfoKey(tzkey)[0]): - subkey = winreg.EnumKey(tzkey, i) - sub = winreg.OpenKey(tzkey, subkey) - data = valuestodict(sub) - sub.Close() - try: - if data['Std'] == tzwin: - tzkeyname = subkey - break - except KeyError: - # This timezone didn't have proper configuration. - # Ignore it. - pass - - tzkey.Close() - handle.Close() - - if tzkeyname is None: - raise LookupError('Can not find Windows timezone configuration') + # Don't support XP any longer + raise LookupError("Can not find Windows timezone configuration") timezone = win_tz.get(tzkeyname) if timezone is None: @@ -81,24 +74,64 @@ def get_localzone_name(): # Return what we have. 
if timezone is None: - raise pytz.UnknownTimeZoneError('Can not find timezone ' + tzkeyname) + raise utils.ZoneInfoNotFoundError(tzkeyname) + + if keyvalues.get("DynamicDaylightTimeDisabled", 0) == 1: + # DST is disabled, so don't return the timezone name, + # instead return Etc/GMT+offset + + tz = pds.timezone(timezone) + has_dst, std_offset, dst_offset = _get_dst_info(tz) + if not has_dst: + # The DST is turned off in the windows configuration, + # but this timezone doesn't have DST so it doesn't matter + return timezone + + if std_offset is None: + raise utils.ZoneInfoNotFoundError( + f"{tzkeyname} claims to not have a non-DST time!?") + + if std_offset % 3600: + # I can't convert this to an hourly offset + raise utils.ZoneInfoNotFoundError( + f"tzlocal can't support disabling DST in the {timezone} zone.") + + # This has whole hours as offset, return it as Etc/GMT + return f"Etc/GMT{-std_offset//3600:+.0f}" return timezone +def get_localzone_name(): + """Get the zoneinfo timezone name that matches the Windows-configured timezone.""" + global _cache_tz_name + if _cache_tz_name is None: + _cache_tz_name = _get_localzone_name() + + return _cache_tz_name + + def get_localzone(): """Returns the zoneinfo-based tzinfo object that matches the Windows-configured timezone.""" + global _cache_tz if _cache_tz is None: - _cache_tz = pytz.timezone(get_localzone_name()) + _cache_tz = pds.timezone(get_localzone_name()) + + if not utils._tz_name_from_env(): + # If the timezone does NOT come from a TZ environment variable, + # verify that it's correct. If it's from the environment, + # we accept it, this is so you can run tests with different timezones. + utils.assert_tz_offset(_cache_tz) - utils.assert_tz_offset(_cache_tz) return _cache_tz def reload_localzone(): """Reload the cached localzone. 
You need to call this if the timezone has changed.""" global _cache_tz - _cache_tz = pytz.timezone(get_localzone_name()) + global _cache_tz_name + _cache_tz_name = _get_localzone_name() + _cache_tz = pds.timezone(_cache_tz_name) utils.assert_tz_offset(_cache_tz) return _cache_tz diff --git a/lib/tzlocal/windows_tz.py b/lib/tzlocal/windows_tz.py index 86ba807d..0d285037 100644 --- a/lib/tzlocal/windows_tz.py +++ b/lib/tzlocal/windows_tz.py @@ -104,6 +104,7 @@ win_tz = {'AUS Central Standard Time': 'Australia/Darwin', 'Saratov Standard Time': 'Europe/Saratov', 'Singapore Standard Time': 'Asia/Singapore', 'South Africa Standard Time': 'Africa/Johannesburg', + 'South Sudan Standard Time': 'Africa/Juba', 'Sri Lanka Standard Time': 'Asia/Colombo', 'Sudan Standard Time': 'Africa/Khartoum', 'Syria Standard Time': 'Asia/Damascus', @@ -118,7 +119,7 @@ win_tz = {'AUS Central Standard Time': 'Australia/Darwin', 'Turks And Caicos Standard Time': 'America/Grand_Turk', 'US Eastern Standard Time': 'America/Indianapolis', 'US Mountain Standard Time': 'America/Phoenix', - 'UTC': 'Etc/GMT', + 'UTC': 'Etc/UTC', 'UTC+12': 'Etc/GMT-12', 'UTC+13': 'Etc/GMT-13', 'UTC-02': 'Etc/GMT+2', @@ -136,7 +137,8 @@ win_tz = {'AUS Central Standard Time': 'Australia/Darwin', 'West Asia Standard Time': 'Asia/Tashkent', 'West Bank Standard Time': 'Asia/Hebron', 'West Pacific Standard Time': 'Pacific/Port_Moresby', - 'Yakutsk Standard Time': 'Asia/Yakutsk'} + 'Yakutsk Standard Time': 'Asia/Yakutsk', + 'Yukon Standard Time': 'America/Whitehorse'} # Old name for the win_tz variable: tz_names = win_tz @@ -166,7 +168,7 @@ tz_win = {'Africa/Abidjan': 'Greenwich Standard Time', 'Africa/Gaborone': 'South Africa Standard Time', 'Africa/Harare': 'South Africa Standard Time', 'Africa/Johannesburg': 'South Africa Standard Time', - 'Africa/Juba': 'E. Africa Standard Time', + 'Africa/Juba': 'South Sudan Standard Time', 'Africa/Kampala': 'E. 
Africa Standard Time', 'Africa/Khartoum': 'Sudan Standard Time', 'Africa/Kigali': 'South Africa Standard Time', @@ -234,8 +236,8 @@ tz_win = {'Africa/Abidjan': 'Greenwich Standard Time', 'America/Creston': 'US Mountain Standard Time', 'America/Cuiaba': 'Central Brazilian Standard Time', 'America/Curacao': 'SA Western Standard Time', - 'America/Danmarkshavn': 'UTC', - 'America/Dawson': 'Pacific Standard Time', + 'America/Danmarkshavn': 'Greenwich Standard Time', + 'America/Dawson': 'Yukon Standard Time', 'America/Dawson_Creek': 'US Mountain Standard Time', 'America/Denver': 'Mountain Standard Time', 'America/Detroit': 'Eastern Standard Time', @@ -345,14 +347,14 @@ tz_win = {'Africa/Abidjan': 'Greenwich Standard Time', 'America/Tortola': 'SA Western Standard Time', 'America/Vancouver': 'Pacific Standard Time', 'America/Virgin': 'SA Western Standard Time', - 'America/Whitehorse': 'Pacific Standard Time', + 'America/Whitehorse': 'Yukon Standard Time', 'America/Winnipeg': 'Central Standard Time', 'America/Yakutat': 'Alaskan Standard Time', 'America/Yellowknife': 'Mountain Standard Time', - 'Antarctica/Casey': 'Singapore Standard Time', + 'Antarctica/Casey': 'Central Pacific Standard Time', 'Antarctica/Davis': 'SE Asia Standard Time', 'Antarctica/DumontDUrville': 'West Pacific Standard Time', - 'Antarctica/Macquarie': 'Central Pacific Standard Time', + 'Antarctica/Macquarie': 'Tasmania Standard Time', 'Antarctica/Mawson': 'West Asia Standard Time', 'Antarctica/McMurdo': 'New Zealand Standard Time', 'Antarctica/Palmer': 'SA Eastern Standard Time', @@ -501,7 +503,7 @@ tz_win = {'Africa/Abidjan': 'Greenwich Standard Time', 'Canada/Newfoundland': 'Newfoundland Standard Time', 'Canada/Pacific': 'Pacific Standard Time', 'Canada/Saskatchewan': 'Canada Central Standard Time', - 'Canada/Yukon': 'Pacific Standard Time', + 'Canada/Yukon': 'Yukon Standard Time', 'Chile/Continental': 'Pacific SA Standard Time', 'Chile/EasterIsland': 'Easter Island Standard Time', 'Cuba': 'Cuba Standard Time', diff --git a/package/requirements-package.txt b/package/requirements-package.txt index c7d7245e..5acd462f 100644 --- a/package/requirements-package.txt +++ b/package/requirements-package.txt @@ -1,4 +1,4 @@ -apscheduler==3.8.0 +apscheduler==3.9.1 importlib-resources==5.6.0 pyinstaller==4.9 pyopenssl==22.0.0 diff --git a/requirements.txt b/requirements.txt index d433889a..12ec6726 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ appdirs==1.4.4 -apscheduler==3.8.0 +apscheduler==3.9.1 arrow==1.2.2 backports.csv==1.0.7 backports.functools-lru-cache==1.6.4 @@ -33,7 +33,7 @@ PyJWT==2.3.0 pyparsing==3.0.7 python-dateutil==2.8.2 python-twitter==3.5 -pytz==2021.3 +pytz==2022.1 requests==2.27.1 requests-oauthlib==1.3.1 rumps==0.3.0; platform_system == "Darwin" @@ -42,8 +42,8 @@ six==1.16.0 soupsieve==2.3.1 tempora==5.0.1 tokenize-rt==4.2.1 -tzdata==2021.5 -tzlocal==2.1 # apscheduler==3.8.0 requires tzlocal~=2.0 +tzdata==2022.1 +tzlocal==4.2 urllib3==1.26.8 webencodings==0.5.1 websocket-client==1.2.3 From aa0c58ef0e92915c88803f9f5ac533485a893832 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 20:33:29 -0700 Subject: [PATCH 076/743] Bump dnspython from 2.2.0 to 2.2.1 (#1679) * Bump dnspython from 2.2.0 to 2.2.1 Bumps [dnspython](https://github.com/rthalley/dnspython) from 2.2.0 to 2.2.1. 
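[Editor's note] Beyond the docstring fixes, the behavioral change in 2.2.1 visible in the diff below is the tokenizer's new `allow_empty` flag: an empty trailing identifier run now raises `dns.exception.SyntaxError` unless the call site opts out, which `rdata.py` and `TKEY.py` do for fields that may legitimately be empty. A minimal sketch — the private-use type number is illustrative, and the behavior is inferred from the diff rather than tested:

```python
import dns.rdata

# RFC 3597 generic syntax: "\# 0" declares zero-length rdata. Because
# GenericRdata.from_text() calls concatenate_remaining_identifiers(True),
# this still parses in 2.2.1, while call sites that require at least one
# identifier now fail loudly instead of silently accepting nothing.
rdata = dns.rdata.from_text("IN", "TYPE65280", r"\# 0")
print(rdata.to_text())  # round-trips in the generic "\# 0" form
```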
- [Release notes](https://github.com/rthalley/dnspython/releases) - [Changelog](https://github.com/rthalley/dnspython/blob/master/doc/whatsnew.rst) - [Commits](https://github.com/rthalley/dnspython/compare/v2.2.0...v2.2.1) --- updated-dependencies: - dependency-name: dnspython dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Update dnspython==2.2.1 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/dns/_immutable_attr.py | 2 +- lib/dns/asyncquery.pyi | 6 +++--- lib/dns/dnssec.py | 2 +- lib/dns/exception.py | 2 +- lib/dns/exception.pyi | 2 ++ lib/dns/message.py | 2 +- lib/dns/query.py | 4 ++-- lib/dns/rdata.py | 2 +- lib/dns/rdataset.py | 2 +- lib/dns/rdtypes/ANY/TKEY.py | 2 +- lib/dns/rdtypes/ANY/__init__.py | 4 ++++ lib/dns/rdtypes/svcbbase.py | 2 +- lib/dns/rdtypes/txtbase.pyi | 8 +++++++- lib/dns/renderer.py | 4 ++-- lib/dns/resolver.py | 10 +++++----- lib/dns/resolver.pyi | 7 ++++++- lib/dns/tokenizer.py | 7 ++++++- lib/dns/transaction.py | 13 +++++++++++-- lib/dns/tsigkeyring.py | 1 + lib/dns/version.py | 2 +- lib/dns/versioned.py | 2 +- lib/dns/zone.py | 30 ++++++++++++++++++++++++++++-- requirements.txt | 2 +- 23 files changed, 88 insertions(+), 30 deletions(-) diff --git a/lib/dns/_immutable_attr.py b/lib/dns/_immutable_attr.py index f7b9f8b0..4d89be90 100644 --- a/lib/dns/_immutable_attr.py +++ b/lib/dns/_immutable_attr.py @@ -49,7 +49,7 @@ def _immutable_init(f): f(*args, **kwargs) finally: if not previous: - # If we started the initialzation, establish immutability + # If we started the initialization, establish immutability # by removing the attribute that allows mutation object.__delattr__(args[0], '_immutable_init') nf.__signature__ = inspect.signature(f) diff --git a/lib/dns/asyncquery.pyi b/lib/dns/asyncquery.pyi index 21ef60dd..a03434c2 100644 --- a/lib/dns/asyncquery.pyi +++ b/lib/dns/asyncquery.pyi @@ -19,7 +19,7 @@ async def udp(q : message.Message, where : str, one_rr_per_rrset : Optional[bool] = False, ignore_trailing : Optional[bool] = False, sock : Optional[asyncbackend.DatagramSocket] = None, - backend : Optional[asyncbackend.Backend]) -> message.Message: + backend : Optional[asyncbackend.Backend] = None) -> message.Message: pass async def tcp(q : message.Message, where : str, timeout : float = None, port=53, @@ -28,7 +28,7 @@ async def tcp(q : message.Message, where : str, timeout : float = None, port=53, one_rr_per_rrset : Optional[bool] = False, ignore_trailing : Optional[bool] = False, sock : Optional[asyncbackend.StreamSocket] = None, - backend : Optional[asyncbackend.Backend]) -> message.Message: + backend : Optional[asyncbackend.Backend] = None) -> message.Message: pass async def tls(q : message.Message, where : str, @@ -37,7 +37,7 @@ async def tls(q : message.Message, where : str, one_rr_per_rrset : Optional[bool] = False, ignore_trailing : Optional[bool] = False, sock : Optional[asyncbackend.StreamSocket] = None, - backend : Optional[asyncbackend.Backend], + backend : Optional[asyncbackend.Backend] = None, ssl_context: Optional[ssl.SSLContext] = None, server_hostname: Optional[str] = None) -> message.Message: pass diff --git a/lib/dns/dnssec.py b/lib/dns/dnssec.py index 6e9946f4..dee4e618 100644 --- a/lib/dns/dnssec.py +++ b/lib/dns/dnssec.py @@ -108,7 +108,7 @@ def key_id(key): return total & 0xffff class DSDigest(dns.enum.IntEnum): - """DNSSEC Delgation Signer Digest 
Algorithm""" + """DNSSEC Delegation Signer Digest Algorithm""" SHA1 = 1 SHA256 = 2 diff --git a/lib/dns/exception.py b/lib/dns/exception.py index 93923734..08393821 100644 --- a/lib/dns/exception.py +++ b/lib/dns/exception.py @@ -125,7 +125,7 @@ class TooBig(DNSException): class Timeout(DNSException): """The DNS operation timed out.""" supp_kwargs = {'timeout'} - fmt = "The DNS operation timed out after {timeout} seconds" + fmt = "The DNS operation timed out after {timeout:.3f} seconds" class ExceptionWrapper: diff --git a/lib/dns/exception.pyi b/lib/dns/exception.pyi index b29bfbea..dc571264 100644 --- a/lib/dns/exception.pyi +++ b/lib/dns/exception.pyi @@ -8,3 +8,5 @@ class DNSException(Exception): class SyntaxError(DNSException): ... class FormError(DNSException): ... class Timeout(DNSException): ... +class TooBig(DNSException): ... +class UnexpectedEnd(SyntaxError): ... diff --git a/lib/dns/message.py b/lib/dns/message.py index 1e67a17b..c2751a90 100644 --- a/lib/dns/message.py +++ b/lib/dns/message.py @@ -509,7 +509,7 @@ class Message: expected to return a key. *keyname*, a ``dns.name.Name``, ``str`` or ``None``, the name of - thes TSIG key to use; defaults to ``None``. If *keyring* is a + this TSIG key to use; defaults to ``None``. If *keyring* is a ``dict``, the key must be defined in it. If *keyring* is a ``dns.tsig.Key``, this is ignored. diff --git a/lib/dns/query.py b/lib/dns/query.py index fbf76d8b..6d924b5f 100644 --- a/lib/dns/query.py +++ b/lib/dns/query.py @@ -1085,10 +1085,10 @@ def inbound_xfr(where, txn_manager, query=None, *udp_mode*, a ``dns.query.UDPMode``, determines how UDP is used for IXFRs. The default is ``dns.UDPMode.NEVER``, i.e. only use - TCP. Other possibilites are ``dns.UDPMode.TRY_FIRST``, which + TCP. Other possibilities are ``dns.UDPMode.TRY_FIRST``, which means "try UDP but fallback to TCP if needed", and ``dns.UDPMode.ONLY``, which means "try UDP and raise - ``dns.xfr.UseTCP`` if it does not succeeed. + ``dns.xfr.UseTCP`` if it does not succeed. Raises on errors. """ diff --git a/lib/dns/rdata.py b/lib/dns/rdata.py index 624063e0..6b5b5c5a 100644 --- a/lib/dns/rdata.py +++ b/lib/dns/rdata.py @@ -557,7 +557,7 @@ class GenericRdata(Rdata): raise dns.exception.SyntaxError( r'generic rdata does not start with \#') length = tok.get_int() - hex = tok.concatenate_remaining_identifiers().encode() + hex = tok.concatenate_remaining_identifiers(True).encode() data = binascii.unhexlify(hex) if len(data) != length: raise dns.exception.SyntaxError( diff --git a/lib/dns/rdataset.py b/lib/dns/rdataset.py index e69ee232..579bc964 100644 --- a/lib/dns/rdataset.py +++ b/lib/dns/rdataset.py @@ -312,7 +312,7 @@ class Rdataset(dns.set.Set): def processing_order(self): """Return rdatas in a valid processing order according to the type's specification. For example, MX records are in preference order from - lowest to highest preferences, with items of the same perference + lowest to highest preferences, with items of the same preference shuffled. 
For types that do not define a processing order, the rdatas are diff --git a/lib/dns/rdtypes/ANY/TKEY.py b/lib/dns/rdtypes/ANY/TKEY.py index f8c47372..861fc4e3 100644 --- a/lib/dns/rdtypes/ANY/TKEY.py +++ b/lib/dns/rdtypes/ANY/TKEY.py @@ -63,7 +63,7 @@ class TKEY(dns.rdata.Rdata): error = tok.get_uint16() key_b64 = tok.get_string().encode() key = base64.b64decode(key_b64) - other_b64 = tok.concatenate_remaining_identifiers().encode() + other_b64 = tok.concatenate_remaining_identifiers(True).encode() other = base64.b64decode(other_b64) return cls(rdclass, rdtype, algorithm, inception, expiration, mode, diff --git a/lib/dns/rdtypes/ANY/__init__.py b/lib/dns/rdtypes/ANY/__init__.py index 6c56baff..2cadcde3 100644 --- a/lib/dns/rdtypes/ANY/__init__.py +++ b/lib/dns/rdtypes/ANY/__init__.py @@ -37,8 +37,12 @@ __all__ = [ 'HINFO', 'HIP', 'ISDN', + 'L32', + 'L64', 'LOC', + 'LP', 'MX', + 'NID', 'NINFO', 'NS', 'NSEC', diff --git a/lib/dns/rdtypes/svcbbase.py b/lib/dns/rdtypes/svcbbase.py index 09d7a52b..3362571c 100644 --- a/lib/dns/rdtypes/svcbbase.py +++ b/lib/dns/rdtypes/svcbbase.py @@ -435,7 +435,7 @@ class SVCBBase(dns.rdata.Rdata): if not isinstance(v, Param) and v is not None: raise ValueError("not a Param") self.params = dns.immutable.Dict(params) - # Make sure any paramater listed as mandatory is present in the + # Make sure any parameter listed as mandatory is present in the # record. mandatory = params.get(ParamKey.MANDATORY) if mandatory: diff --git a/lib/dns/rdtypes/txtbase.pyi b/lib/dns/rdtypes/txtbase.pyi index af447d50..f8d5df98 100644 --- a/lib/dns/rdtypes/txtbase.pyi +++ b/lib/dns/rdtypes/txtbase.pyi @@ -1,6 +1,12 @@ +import typing from .. import rdata class TXTBase(rdata.Rdata): - ... + strings: typing.Tuple[bytes, ...] + + def __init__(self, rdclass: int, rdtype: int, strings: typing.Iterable[bytes]) -> None: + ... + def to_text(self, origin: typing.Any, relativize: bool, **kw: typing.Any) -> str: + ... class TXT(TXTBase): ... diff --git a/lib/dns/renderer.py b/lib/dns/renderer.py index 72f0f7a8..4e4391cd 100644 --- a/lib/dns/renderer.py +++ b/lib/dns/renderer.py @@ -49,8 +49,8 @@ class Renderer: r.add_rrset(dns.renderer.ANSWER, rrset_2) r.add_rrset(dns.renderer.AUTHORITY, ns_rrset) r.add_edns(0, 0, 4096) - r.add_rrset(dns.renderer.ADDTIONAL, ad_rrset_1) - r.add_rrset(dns.renderer.ADDTIONAL, ad_rrset_2) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_1) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_2) r.write_header() r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac) wire = r.get_wire() diff --git a/lib/dns/resolver.py b/lib/dns/resolver.py index 166f8492..7da7a613 100644 --- a/lib/dns/resolver.py +++ b/lib/dns/resolver.py @@ -145,7 +145,7 @@ class LifetimeTimeout(dns.exception.Timeout): """The resolution lifetime expired.""" msg = "The resolution lifetime expired." - fmt = "%s after {timeout} seconds: {errors}" % msg[:-1] + fmt = "%s after {timeout:.3f} seconds: {errors}" % msg[:-1] supp_kwargs = {'timeout', 'errors'} def _fmt_kwargs(self, **kwargs): @@ -1018,7 +1018,7 @@ class Resolver(BaseResolver): *source_port*, an ``int``, the port from which to send the message. *lifetime*, a ``float``, how many seconds a query should run - before timing out. + before timing out. *search*, a ``bool`` or ``None``, determines whether the search list configured in the system's resolver configuration @@ -1101,7 +1101,7 @@ class Resolver(BaseResolver): """Query nameservers to find the answer to the question. 
This method calls resolve() with ``search=True``, and is - provided for backwards compatbility with prior versions of + provided for backwards compatibility with prior versions of dnspython. See the documentation for the resolve() method for further details. """ @@ -1200,7 +1200,7 @@ def query(qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, """Query nameservers to find the answer to the question. This method calls resolve() with ``search=True``, and is - provided for backwards compatbility with prior versions of + provided for backwards compatibility with prior versions of dnspython. See the documentation for the resolve() method for further details. """ @@ -1253,7 +1253,7 @@ def zone_for_name(name, rdclass=dns.rdataclass.IN, tcp=False, resolver=None, root servers in your network and they are misconfigured.) Raises ``dns.resolver.LifetimeTimeout`` if the answer could not be - found in the alotted lifetime. + found in the allotted lifetime. Returns a ``dns.name.Name``. """ diff --git a/lib/dns/resolver.pyi b/lib/dns/resolver.pyi index 6da21f12..348df4da 100644 --- a/lib/dns/resolver.pyi +++ b/lib/dns/resolver.pyi @@ -25,7 +25,7 @@ def query(qname : str, rdtype : Union[int,str] = 0, tcp=False, source=None, raise_on_no_answer=True, source_port=0, lifetime : Optional[float]=None): ... -def resolve_address(self, ipaddr: str, *args: Any, **kwargs: Optional[Dict]): +def resolve_address(ipaddr: str, *args: Any, **kwargs: Optional[Dict]): ... class LRUCache: def __init__(self, max_size=1000): @@ -59,3 +59,8 @@ class Resolver: raise_on_no_answer=True, source_port : int = 0, lifetime : Optional[float]=None): ... +default_resolver: typing.Optional[Resolver] +def reset_default_resolver() -> None: + ... +def get_default_resolver() -> Resolver: + ... diff --git a/lib/dns/tokenizer.py b/lib/dns/tokenizer.py index 7ddc7a96..cb6a6302 100644 --- a/lib/dns/tokenizer.py +++ b/lib/dns/tokenizer.py @@ -600,9 +600,12 @@ class Tokenizer: break return tokens - def concatenate_remaining_identifiers(self): + def concatenate_remaining_identifiers(self, allow_empty=False): """Read the remaining tokens on the line, which should be identifiers. + Raises dns.exception.SyntaxError if there are no remaining tokens, + unless `allow_empty=True` is given. + Raises dns.exception.SyntaxError if a token is seen that is not an identifier. @@ -618,6 +621,8 @@ class Tokenizer: if not token.is_identifier(): raise dns.exception.SyntaxError s += token.value + if not (allow_empty or s): + raise dns.exception.SyntaxError('expecting another identifier') return s def as_name(self, token, origin=None, relativize=False, relativize_to=None): diff --git a/lib/dns/transaction.py b/lib/dns/transaction.py index ae7417ed..d7254924 100644 --- a/lib/dns/transaction.py +++ b/lib/dns/transaction.py @@ -389,7 +389,7 @@ class Transaction: if rdataset.rdclass != self.manager.get_class(): raise ValueError(f'{method} has objects of wrong RdataClass') if rdataset.rdtype == dns.rdatatype.SOA: - (_, _, origin) = self.manager.origin_information() + (_, _, origin) = self._origin_information() if name != origin: raise ValueError(f'{method} has non-origin SOA') self._raise_if_not_empty(method, args) @@ -560,7 +560,7 @@ class Transaction: *commit*, a bool. If ``True``, commit the transaction, otherwise roll it back. - If committing adn the commit fails, then roll back and raise an + If committing and the commit fails, then roll back and raise an exception. 
""" raise NotImplementedError # pragma: no cover @@ -585,3 +585,12 @@ class Transaction: Returns a node or ``None``. """ raise NotImplementedError # pragma: no cover + + # + # Low-level API with a default implementation, in case a subclass needs + # to override. + # + + def _origin_information(self): + # This is only used by _add() + return self.manager.origin_information() diff --git a/lib/dns/tsigkeyring.py b/lib/dns/tsigkeyring.py index 47a1f79f..788581c9 100644 --- a/lib/dns/tsigkeyring.py +++ b/lib/dns/tsigkeyring.py @@ -20,6 +20,7 @@ import base64 import dns.name +import dns.tsig def from_text(textring): diff --git a/lib/dns/version.py b/lib/dns/version.py index 745a5c7f..65017872 100644 --- a/lib/dns/version.py +++ b/lib/dns/version.py @@ -22,7 +22,7 @@ MAJOR = 2 #: MINOR MINOR = 2 #: MICRO -MICRO = 0 +MICRO = 1 #: RELEASELEVEL RELEASELEVEL = 0x0f #: SERIAL diff --git a/lib/dns/versioned.py b/lib/dns/versioned.py index 42f2c814..8b6c275f 100644 --- a/lib/dns/versioned.py +++ b/lib/dns/versioned.py @@ -131,7 +131,7 @@ class Zone(dns.zone.Zone): # # We only wake one sleeper at a time, so it's important # that no event waiter can exit this method (e.g. via - # cancelation) without returning a transaction or waking + # cancellation) without returning a transaction or waking # someone else up. # # This is not a problem with Threading module threads as diff --git a/lib/dns/zone.py b/lib/dns/zone.py index 2e731446..5a649404 100644 --- a/lib/dns/zone.py +++ b/lib/dns/zone.py @@ -183,6 +183,11 @@ class Zone(dns.transaction.TransactionManager): "name parameter must be a subdomain of the zone origin") if self.relativize: name = name.relativize(self.origin) + elif not self.relativize: + # We have a relative name in a non-relative zone, so derelativize. + if self.origin is None: + raise KeyError('no zone origin is defined') + name = name.derelativize(self.origin) return name def __getitem__(self, key): @@ -870,11 +875,20 @@ class Version: def _validate_name(self, name): if name.is_absolute(): - if not name.is_subdomain(self.zone.origin): + if self.origin is None: + # This should probably never happen as other code (e.g. + # _rr_line) will notice the lack of an origin before us, but + # we check just in case! + raise KeyError('no zone origin is defined') + if not name.is_subdomain(self.origin): raise KeyError("name is not a subdomain of the zone origin") if self.zone.relativize: - # XXXRTH should it be an error if self.origin is still None? name = name.relativize(self.origin) + elif not self.zone.relativize: + # We have a relative name in a non-relative zone, so derelativize. + if self.origin is None: + raise KeyError('no zone origin is defined') + name = name.derelativize(self.origin) return name def get_node(self, name): @@ -1030,6 +1044,18 @@ class Transaction(dns.transaction.Transaction): def _get_node(self, name): return self.version.get_node(name) + def _origin_information(self): + (absolute, relativize, effective) = self.manager.origin_information() + if absolute is None and self.version.origin is not None: + # No origin has been committed yet, but we've learned one as part of + # this txn. Use it. 
+ absolute = self.version.origin + if relativize: + effective = dns.name.empty + else: + effective = absolute + return (absolute, relativize, effective) + def from_text(text, origin=None, rdclass=dns.rdataclass.IN, relativize=True, zone_factory=Zone, filename=None, diff --git a/requirements.txt b/requirements.txt index 12ec6726..1861eeaa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,7 @@ cheroot==8.6.0 cherrypy==18.6.1 cloudinary==1.29.0 distro==1.7.0 -dnspython==2.2.0 +dnspython==2.2.1 facebook-sdk==3.1.0 future==0.18.2 gntp==1.0.3 From 238afb47943f84a81c373e63bb90eb860852a6b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 20:33:50 -0700 Subject: [PATCH 077/743] Bump mako from 1.1.6 to 1.2.0 (#1684) * Bump mako from 1.1.6 to 1.2.0 Bumps [mako](https://github.com/sqlalchemy/mako) from 1.1.6 to 1.2.0. - [Release notes](https://github.com/sqlalchemy/mako/releases) - [Changelog](https://github.com/sqlalchemy/mako/blob/main/CHANGES) - [Commits](https://github.com/sqlalchemy/mako/commits) --- updated-dependencies: - dependency-name: mako dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update mako==1.2.0 * Update MarkupSafe==2.1.1 * Add importlib-metadata==4.11.3 * Update requirements.txt Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/importlib_metadata/__init__.py | 1075 ++++++++++++++++++++++++ lib/importlib_metadata/_adapters.py | 68 ++ lib/importlib_metadata/_collections.py | 30 + lib/importlib_metadata/_compat.py | 71 ++ lib/importlib_metadata/_functools.py | 104 +++ lib/importlib_metadata/_itertools.py | 73 ++ lib/importlib_metadata/_meta.py | 48 ++ lib/importlib_metadata/_text.py | 99 +++ lib/importlib_metadata/py.typed | 0 lib/mako/__init__.py | 4 +- lib/mako/_ast_util.py | 13 +- lib/mako/ast.py | 29 +- lib/mako/cache.py | 9 +- lib/mako/cmd.py | 9 +- lib/mako/codegen.py | 58 +- lib/mako/compat.py | 141 +--- lib/mako/exceptions.py | 37 +- lib/mako/ext/autohandler.py | 2 +- lib/mako/ext/babelplugin.py | 11 +- lib/mako/ext/beaker_cache.py | 4 +- lib/mako/ext/extract.py | 22 +- lib/mako/ext/linguaplugin.py | 36 +- lib/mako/ext/preprocessors.py | 2 +- lib/mako/ext/pygmentplugin.py | 19 +- lib/mako/ext/turbogears.py | 6 +- lib/mako/filters.py | 96 +-- lib/mako/lexer.py | 232 +++-- lib/mako/lookup.py | 58 +- lib/mako/parsetree.py | 117 ++- lib/mako/pygen.py | 42 +- lib/mako/pyparser.py | 82 +- lib/mako/runtime.py | 90 +- lib/mako/template.py | 119 +-- lib/mako/testing/__init__.py | 0 lib/mako/testing/_config.py | 128 +++ lib/mako/testing/assertions.py | 167 ++++ lib/mako/testing/config.py | 17 + lib/mako/testing/exclusions.py | 80 ++ lib/mako/testing/fixtures.py | 109 +++ lib/mako/testing/helpers.py | 67 ++ lib/mako/util.py | 60 +- lib/markupsafe/__init__.py | 25 +- lib/markupsafe/_native.py | 12 - lib/markupsafe/_speedups.c | 320 +++++++ requirements.txt | 5 +- 45 files changed, 2948 insertions(+), 848 deletions(-) create mode 100644 lib/importlib_metadata/__init__.py create mode 100644 lib/importlib_metadata/_adapters.py create mode 100644 lib/importlib_metadata/_collections.py create mode 100644 lib/importlib_metadata/_compat.py create mode 100644 lib/importlib_metadata/_functools.py create mode 100644 lib/importlib_metadata/_itertools.py create mode 100644 lib/importlib_metadata/_meta.py create mode 
100644 lib/importlib_metadata/_text.py create mode 100644 lib/importlib_metadata/py.typed create mode 100644 lib/mako/testing/__init__.py create mode 100644 lib/mako/testing/_config.py create mode 100644 lib/mako/testing/assertions.py create mode 100644 lib/mako/testing/config.py create mode 100644 lib/mako/testing/exclusions.py create mode 100644 lib/mako/testing/fixtures.py create mode 100644 lib/mako/testing/helpers.py create mode 100644 lib/markupsafe/_speedups.c diff --git a/lib/importlib_metadata/__init__.py b/lib/importlib_metadata/__init__.py new file mode 100644 index 00000000..5ac8be23 --- /dev/null +++ b/lib/importlib_metadata/__init__.py @@ -0,0 +1,1075 @@ +import os +import re +import abc +import csv +import sys +import zipp +import email +import pathlib +import operator +import textwrap +import warnings +import functools +import itertools +import posixpath +import collections + +from . import _adapters, _meta +from ._collections import FreezableDefaultDict, Pair +from ._compat import ( + NullFinder, + install, + pypy_partial, +) +from ._functools import method_cache, pass_none +from ._itertools import always_iterable, unique_everseen +from ._meta import PackageMetadata, SimplePath + +from contextlib import suppress +from importlib import import_module +from importlib.abc import MetaPathFinder +from itertools import starmap +from typing import List, Mapping, Optional, Union + + +__all__ = [ + 'Distribution', + 'DistributionFinder', + 'PackageMetadata', + 'PackageNotFoundError', + 'distribution', + 'distributions', + 'entry_points', + 'files', + 'metadata', + 'packages_distributions', + 'requires', + 'version', +] + + +class PackageNotFoundError(ModuleNotFoundError): + """The package was not found.""" + + def __str__(self): + return f"No package metadata was found for {self.name}" + + @property + def name(self): + (name,) = self.args + return name + + +class Sectioned: + """ + A simple entry point config parser for performance + + >>> for item in Sectioned.read(Sectioned._sample): + ... print(item) + Pair(name='sec1', value='# comments ignored') + Pair(name='sec1', value='a = 1') + Pair(name='sec1', value='b = 2') + Pair(name='sec2', value='a = 2') + + >>> res = Sectioned.section_pairs(Sectioned._sample) + >>> item = next(res) + >>> item.name + 'sec1' + >>> item.value + Pair(name='a', value='1') + >>> item = next(res) + >>> item.value + Pair(name='b', value='2') + >>> item = next(res) + >>> item.name + 'sec2' + >>> item.value + Pair(name='a', value='2') + >>> list(res) + [] + """ + + _sample = textwrap.dedent( + """ + [sec1] + # comments ignored + a = 1 + b = 2 + + [sec2] + a = 2 + """ + ).lstrip() + + @classmethod + def section_pairs(cls, text): + return ( + section._replace(value=Pair.parse(section.value)) + for section in cls.read(text, filter_=cls.valid) + if section.name is not None + ) + + @staticmethod + def read(text, filter_=None): + lines = filter(filter_, map(str.strip, text.splitlines())) + name = None + for value in lines: + section_match = value.startswith('[') and value.endswith(']') + if section_match: + name = value.strip('[]') + continue + yield Pair(name, value) + + @staticmethod + def valid(line): + return line and not line.startswith('#') + + +class DeprecatedTuple: + """ + Provide subscript item access for backward compatibility. 
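For illustration, the by-name access this shim steers callers toward, as a sketch (assumes importlib_metadata 4.11.x is importable; "console_scripts" is just a commonly populated group):

from importlib_metadata import entry_points

for ep in entry_points(group="console_scripts"):
    # Preferred over the deprecated tuple access ep[0], ep[1], ep[2].
    print(ep.name, ep.value, ep.group)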
+ + >>> recwarn = getfixture('recwarn') + >>> ep = EntryPoint(name='name', value='value', group='group') + >>> ep[:] + ('name', 'value', 'group') + >>> ep[0] + 'name' + >>> len(recwarn) + 1 + """ + + _warn = functools.partial( + warnings.warn, + "EntryPoint tuple interface is deprecated. Access members by name.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def __getitem__(self, item): + self._warn() + return self._key()[item] + + +class EntryPoint(DeprecatedTuple): + """An entry point as defined by Python packaging conventions. + + See `the packaging docs on entry points + `_ + for more information. + + >>> ep = EntryPoint( + ... name=None, group=None, value='package.module:attr [extra1, extra2]') + >>> ep.module + 'package.module' + >>> ep.attr + 'attr' + >>> ep.extras + ['extra1', 'extra2'] + """ + + pattern = re.compile( + r'(?P[\w.]+)\s*' + r'(:\s*(?P[\w.]+)\s*)?' + r'((?P\[.*\])\s*)?$' + ) + """ + A regular expression describing the syntax for an entry point, + which might look like: + + - module + - package.module + - package.module:attribute + - package.module:object.attribute + - package.module:attr [extra1, extra2] + + Other combinations are possible as well. + + The expression is lenient about whitespace around the ':', + following the attr, and following any extras. + """ + + dist: Optional['Distribution'] = None + + def __init__(self, name, value, group): + vars(self).update(name=name, value=value, group=group) + + def load(self): + """Load the entry point from its definition. If only a module + is indicated by the value, return that module. Otherwise, + return the named object. + """ + match = self.pattern.match(self.value) + module = import_module(match.group('module')) + attrs = filter(None, (match.group('attr') or '').split('.')) + return functools.reduce(getattr, attrs, module) + + @property + def module(self): + match = self.pattern.match(self.value) + return match.group('module') + + @property + def attr(self): + match = self.pattern.match(self.value) + return match.group('attr') + + @property + def extras(self): + match = self.pattern.match(self.value) + return re.findall(r'\w+', match.group('extras') or '') + + def _for(self, dist): + vars(self).update(dist=dist) + return self + + def __iter__(self): + """ + Supply iter so one may construct dicts of EntryPoints by name. + """ + msg = ( + "Construction of dict of EntryPoints is deprecated in " + "favor of EntryPoints." + ) + warnings.warn(msg, DeprecationWarning) + return iter((self.name, self)) + + def matches(self, **params): + """ + EntryPoint matches the given parameters. 
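For illustration, a runnable sketch of constructing and loading an EntryPoint directly (the "demo" group and "dumps" name are made up; "json:dumps" resolves via the stdlib):

from importlib_metadata import EntryPoint

ep = EntryPoint(name="dumps", value="json:dumps", group="demo")
func = ep.load()       # imports json, then resolves the "dumps" attribute
print(func({"a": 1}))  # {"a": 1}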
+ + >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') + >>> ep.matches(group='foo') + True + >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') + True + >>> ep.matches(group='foo', name='other') + False + >>> ep.matches() + True + >>> ep.matches(extras=['extra1', 'extra2']) + True + >>> ep.matches(module='bing') + True + >>> ep.matches(attr='bong') + True + """ + attrs = (getattr(self, param) for param in params) + return all(map(operator.eq, params.values(), attrs)) + + def _key(self): + return self.name, self.value, self.group + + def __lt__(self, other): + return self._key() < other._key() + + def __eq__(self, other): + return self._key() == other._key() + + def __setattr__(self, name, value): + raise AttributeError("EntryPoint objects are immutable.") + + def __repr__(self): + return ( + f'EntryPoint(name={self.name!r}, value={self.value!r}, ' + f'group={self.group!r})' + ) + + def __hash__(self): + return hash(self._key()) + + +class DeprecatedList(list): + """ + Allow an otherwise immutable object to implement mutability + for compatibility. + + >>> recwarn = getfixture('recwarn') + >>> dl = DeprecatedList(range(3)) + >>> dl[0] = 1 + >>> dl.append(3) + >>> del dl[3] + >>> dl.reverse() + >>> dl.sort() + >>> dl.extend([4]) + >>> dl.pop(-1) + 4 + >>> dl.remove(1) + >>> dl += [5] + >>> dl + [6] + [1, 2, 5, 6] + >>> dl + (6,) + [1, 2, 5, 6] + >>> dl.insert(0, 0) + >>> dl + [0, 1, 2, 5] + >>> dl == [0, 1, 2, 5] + True + >>> dl == (0, 1, 2, 5) + True + >>> len(recwarn) + 1 + """ + + __slots__ = () + + _warn = functools.partial( + warnings.warn, + "EntryPoints list interface is deprecated. Cast to list if needed.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def _wrap_deprecated_method(method_name: str): # type: ignore + def wrapped(self, *args, **kwargs): + self._warn() + return getattr(super(), method_name)(*args, **kwargs) + + return method_name, wrapped + + locals().update( + map( + _wrap_deprecated_method, + '__setitem__ __delitem__ append reverse extend pop remove ' + '__iadd__ insert sort'.split(), + ) + ) + + def __add__(self, other): + if not isinstance(other, tuple): + self._warn() + other = tuple(other) + return self.__class__(tuple(self) + other) + + def __eq__(self, other): + if not isinstance(other, tuple): + self._warn() + other = tuple(other) + + return tuple(self).__eq__(other) + + +class EntryPoints(DeprecatedList): + """ + An immutable collection of selectable EntryPoint objects. + """ + + __slots__ = () + + def __getitem__(self, name): # -> EntryPoint: + """ + Get the EntryPoint in self matching name. + """ + if isinstance(name, int): + warnings.warn( + "Accessing entry points by index is deprecated. " + "Cast to tuple if needed.", + DeprecationWarning, + stacklevel=2, + ) + return super().__getitem__(name) + try: + return next(iter(self.select(name=name))) + except StopIteration: + raise KeyError(name) + + def select(self, **params): + """ + Select entry points from self that match the + given parameters (typically group and/or name). + """ + return EntryPoints(ep for ep in self if ep.matches(**params)) + + @property + def names(self): + """ + Return the set of all names of all entry points. + """ + return {ep.name for ep in self} + + @property + def groups(self): + """ + Return the set of all groups of all entry points. + + For coverage while SelectableGroups is present. 
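For illustration, a sketch of the selectable API these properties support (assumes importlib_metadata 4.11.x; "console_scripts" is an assumed group):

from importlib_metadata import entry_points

eps = entry_points()              # SelectableGroups (compatibility shim)
print(sorted(eps.groups))         # all known groups
scripts = eps.select(group="console_scripts")
print(sorted(scripts.names))      # names within one group, no deprecation warning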
+ >>> EntryPoints().groups + set() + """ + return {ep.group for ep in self} + + @classmethod + def _from_text_for(cls, text, dist): + return cls(ep._for(dist) for ep in cls._from_text(text)) + + @staticmethod + def _from_text(text): + return ( + EntryPoint(name=item.value.name, value=item.value.value, group=item.name) + for item in Sectioned.section_pairs(text or '') + ) + + +class Deprecated: + """ + Compatibility add-in for mapping to indicate that + mapping behavior is deprecated. + + >>> recwarn = getfixture('recwarn') + >>> class DeprecatedDict(Deprecated, dict): pass + >>> dd = DeprecatedDict(foo='bar') + >>> dd.get('baz', None) + >>> dd['foo'] + 'bar' + >>> list(dd) + ['foo'] + >>> list(dd.keys()) + ['foo'] + >>> 'foo' in dd + True + >>> list(dd.values()) + ['bar'] + >>> len(recwarn) + 1 + """ + + _warn = functools.partial( + warnings.warn, + "SelectableGroups dict interface is deprecated. Use select.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def __getitem__(self, name): + self._warn() + return super().__getitem__(name) + + def get(self, name, default=None): + self._warn() + return super().get(name, default) + + def __iter__(self): + self._warn() + return super().__iter__() + + def __contains__(self, *args): + self._warn() + return super().__contains__(*args) + + def keys(self): + self._warn() + return super().keys() + + def values(self): + self._warn() + return super().values() + + +class SelectableGroups(Deprecated, dict): + """ + A backward- and forward-compatible result from + entry_points that fully implements the dict interface. + """ + + @classmethod + def load(cls, eps): + by_group = operator.attrgetter('group') + ordered = sorted(eps, key=by_group) + grouped = itertools.groupby(ordered, by_group) + return cls((group, EntryPoints(eps)) for group, eps in grouped) + + @property + def _all(self): + """ + Reconstruct a list of all entrypoints from the groups. + """ + groups = super(Deprecated, self).values() + return EntryPoints(itertools.chain.from_iterable(groups)) + + @property + def groups(self): + return self._all.groups + + @property + def names(self): + """ + for coverage: + >>> SelectableGroups().names + set() + """ + return self._all.names + + def select(self, **params): + if not params: + return self + return self._all.select(**params) + + +class PackagePath(pathlib.PurePosixPath): + """A reference to a path in a package""" + + def read_text(self, encoding='utf-8'): + with self.locate().open(encoding=encoding) as stream: + return stream.read() + + def read_binary(self): + with self.locate().open('rb') as stream: + return stream.read() + + def locate(self): + """Return a path-like object for this path""" + return self.dist.locate_file(self) + + +class FileHash: + def __init__(self, spec): + self.mode, _, self.value = spec.partition('=') + + def __repr__(self): + return f'' + + +class Distribution: + """A Python distribution package.""" + + @abc.abstractmethod + def read_text(self, filename): + """Attempt to load metadata file given by the name. + + :param filename: The name of the file in the distribution info. + :return: The text if found, otherwise None. + """ + + @abc.abstractmethod + def locate_file(self, path): + """ + Given a path to a file in this distribution, return a path + to it. + """ + + @classmethod + def from_name(cls, name): + """Return the Distribution for the given package name. + + :param name: The name of the distribution package to search for. 
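For illustration, a sketch of the lookup flow documented here ("pip" stands in for any installed distribution):

from importlib_metadata import Distribution, PackageNotFoundError

try:
    dist = Distribution.from_name("pip")
    print(dist.version, dist.metadata["Name"])
except PackageNotFoundError:
    print("pip is not installed")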
+ :return: The Distribution instance (or subclass thereof) for the named + package, if found. + :raises PackageNotFoundError: When the named package's distribution + metadata cannot be found. + """ + for resolver in cls._discover_resolvers(): + dists = resolver(DistributionFinder.Context(name=name)) + dist = next(iter(dists), None) + if dist is not None: + return dist + else: + raise PackageNotFoundError(name) + + @classmethod + def discover(cls, **kwargs): + """Return an iterable of Distribution objects for all packages. + + Pass a ``context`` or pass keyword arguments for constructing + a context. + + :context: A ``DistributionFinder.Context`` object. + :return: Iterable of Distribution objects for all packages. + """ + context = kwargs.pop('context', None) + if context and kwargs: + raise ValueError("cannot accept context and kwargs") + context = context or DistributionFinder.Context(**kwargs) + return itertools.chain.from_iterable( + resolver(context) for resolver in cls._discover_resolvers() + ) + + @staticmethod + def at(path): + """Return a Distribution for the indicated metadata path + + :param path: a string or path-like object + :return: a concrete Distribution instance for the path + """ + return PathDistribution(pathlib.Path(path)) + + @staticmethod + def _discover_resolvers(): + """Search the meta_path for resolvers.""" + declared = ( + getattr(finder, 'find_distributions', None) for finder in sys.meta_path + ) + return filter(None, declared) + + @property + def metadata(self) -> _meta.PackageMetadata: + """Return the parsed metadata for this Distribution. + + The returned object will have keys that name the various bits of + metadata. See PEP 566 for details. + """ + text = ( + self.read_text('METADATA') + or self.read_text('PKG-INFO') + # This last clause is here to support old egg-info files. Its + # effect is to just end up using the PathDistribution's self._path + # (which points to the egg-info file) attribute unchanged. + or self.read_text('') + ) + return _adapters.Message(email.message_from_string(text)) + + @property + def name(self): + """Return the 'Name' metadata for the distribution package.""" + return self.metadata['Name'] + + @property + def _normalized_name(self): + """Return a normalized version of the name.""" + return Prepared.normalize(self.name) + + @property + def version(self): + """Return the 'Version' metadata for the distribution package.""" + return self.metadata['Version'] + + @property + def entry_points(self): + return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) + + @property + def files(self): + """Files in this distribution. + + :return: List of PackagePath for this distribution or None + + Result is `None` if the metadata file that enumerates files + (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is + missing. + Result may be empty if the metadata exists but is empty. 
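For illustration, a sketch that handles the None case called out above (again using "pip" as an arbitrary installed distribution):

from importlib_metadata import files

flist = files("pip")
if flist:  # None when RECORD/SOURCES.txt is missing entirely
    print(len(flist), "files; first:", flist[0], "->", flist[0].locate())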
+ """ + + def make_file(name, hash=None, size_str=None): + result = PackagePath(name) + result.hash = FileHash(hash) if hash else None + result.size = int(size_str) if size_str else None + result.dist = self + return result + + @pass_none + def make_files(lines): + return list(starmap(make_file, csv.reader(lines))) + + return make_files(self._read_files_distinfo() or self._read_files_egginfo()) + + def _read_files_distinfo(self): + """ + Read the lines of RECORD + """ + text = self.read_text('RECORD') + return text and text.splitlines() + + def _read_files_egginfo(self): + """ + SOURCES.txt might contain literal commas, so wrap each line + in quotes. + """ + text = self.read_text('SOURCES.txt') + return text and map('"{}"'.format, text.splitlines()) + + @property + def requires(self): + """Generated requirements specified for this Distribution""" + reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() + return reqs and list(reqs) + + def _read_dist_info_reqs(self): + return self.metadata.get_all('Requires-Dist') + + def _read_egg_info_reqs(self): + source = self.read_text('requires.txt') + return pass_none(self._deps_from_requires_text)(source) + + @classmethod + def _deps_from_requires_text(cls, source): + return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source)) + + @staticmethod + def _convert_egg_info_reqs_to_simple_reqs(sections): + """ + Historically, setuptools would solicit and store 'extra' + requirements, including those with environment markers, + in separate sections. More modern tools expect each + dependency to be defined separately, with any relevant + extras and environment markers attached directly to that + requirement. This method converts the former to the + latter. See _test_deps_from_requires_text for an example. + """ + + def make_condition(name): + return name and f'extra == "{name}"' + + def quoted_marker(section): + section = section or '' + extra, sep, markers = section.partition(':') + if extra and markers: + markers = f'({markers})' + conditions = list(filter(None, [markers, make_condition(extra)])) + return '; ' + ' and '.join(conditions) if conditions else '' + + def url_req_space(req): + """ + PEP 508 requires a space between the url_spec and the quoted_marker. + Ref python/importlib_metadata#357. + """ + # '@' is uniquely indicative of a url_req. + return ' ' * ('@' in req) + + for section in sections: + space = url_req_space(section.value) + yield section.value + space + quoted_marker(section.name) + + +class DistributionFinder(MetaPathFinder): + """ + A MetaPathFinder capable of discovering installed distributions. + """ + + class Context: + """ + Keyword arguments presented by the caller to + ``distributions()`` or ``Distribution.discover()`` + to narrow the scope of a search for distributions + in all DistributionFinders. + + Each DistributionFinder may expect any parameters + and should attempt to honor the canonical + parameters defined below when appropriate. + """ + + name = None + """ + Specific name for which a distribution finder should match. + A name of ``None`` matches all distributions. + """ + + def __init__(self, **kwargs): + vars(self).update(kwargs) + + @property + def path(self): + """ + The sequence of directory path that a distribution finder + should search. + + Typically refers to Python installed package paths such as + "site-packages" directories and defaults to ``sys.path``. 
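For illustration, a sketch of narrowing discovery with a custom search path via these Context keyword arguments (the directory is hypothetical; any sequence of paths works):

from importlib_metadata import distributions

for dist in distributions(path=["/opt/app/site-packages"]):
    print(dist.metadata["Name"], dist.version)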
+ """ + return vars(self).get('path', sys.path) + + @abc.abstractmethod + def find_distributions(self, context=Context()): + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching the ``context``, + a DistributionFinder.Context instance. + """ + + +class FastPath: + """ + Micro-optimized class for searching a path for + children. + + >>> FastPath('').children() + ['...'] + """ + + @functools.lru_cache() # type: ignore + def __new__(cls, root): + return super().__new__(cls) + + def __init__(self, root): + self.root = root + + def joinpath(self, child): + return pathlib.Path(self.root, child) + + def children(self): + with suppress(Exception): + return os.listdir(self.root or '.') + with suppress(Exception): + return self.zip_children() + return [] + + def zip_children(self): + zip_path = zipp.Path(self.root) + names = zip_path.root.namelist() + self.joinpath = zip_path.joinpath + + return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names) + + def search(self, name): + return self.lookup(self.mtime).search(name) + + @property + def mtime(self): + with suppress(OSError): + return os.stat(self.root).st_mtime + self.lookup.cache_clear() + + @method_cache + def lookup(self, mtime): + return Lookup(self) + + +class Lookup: + def __init__(self, path: FastPath): + base = os.path.basename(path.root).lower() + base_is_egg = base.endswith(".egg") + self.infos = FreezableDefaultDict(list) + self.eggs = FreezableDefaultDict(list) + + for child in path.children(): + low = child.lower() + if low.endswith((".dist-info", ".egg-info")): + # rpartition is faster than splitext and suitable for this purpose. + name = low.rpartition(".")[0].partition("-")[0] + normalized = Prepared.normalize(name) + self.infos[normalized].append(path.joinpath(child)) + elif base_is_egg and low == "egg-info": + name = base.rpartition(".")[0].partition("-")[0] + legacy_normalized = Prepared.legacy_normalize(name) + self.eggs[legacy_normalized].append(path.joinpath(child)) + + self.infos.freeze() + self.eggs.freeze() + + def search(self, prepared): + infos = ( + self.infos[prepared.normalized] + if prepared + else itertools.chain.from_iterable(self.infos.values()) + ) + eggs = ( + self.eggs[prepared.legacy_normalized] + if prepared + else itertools.chain.from_iterable(self.eggs.values()) + ) + return itertools.chain(infos, eggs) + + +class Prepared: + """ + A prepared search for metadata on a possibly-named package. + """ + + normalized = None + legacy_normalized = None + + def __init__(self, name): + self.name = name + if name is None: + return + self.normalized = self.normalize(name) + self.legacy_normalized = self.legacy_normalize(name) + + @staticmethod + def normalize(name): + """ + PEP 503 normalization plus dashes as underscores. + """ + return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') + + @staticmethod + def legacy_normalize(name): + """ + Normalize the package name as found in the convention in + older packaging tools versions and specs. + """ + return name.lower().replace('-', '_') + + def __bool__(self): + return bool(self.name) + + +@install +class MetadataPathFinder(NullFinder, DistributionFinder): + """A degenerate finder for distribution packages on the file system. + + This finder supplies only a find_distributions() method for versions + of Python that do not have a PathFinder find_distributions(). + """ + + def find_distributions(self, context=DistributionFinder.Context()): + """ + Find distributions. 
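For illustration, the normalization used by Prepared above, restated as a standalone sketch:

import re

def normalize(name):
    # PEP 503 normalization plus dashes as underscores, per Prepared.normalize.
    return re.sub(r"[-_.]+", "-", name).lower().replace("-", "_")

print(normalize("My-Package.Name"))  # my_package_name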
+ + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ + found = self._search_paths(context.name, context.path) + return map(PathDistribution, found) + + @classmethod + def _search_paths(cls, name, paths): + """Find metadata directories in paths heuristically.""" + prepared = Prepared(name) + return itertools.chain.from_iterable( + path.search(prepared) for path in map(FastPath, paths) + ) + + def invalidate_caches(cls): + FastPath.__new__.cache_clear() + + +class PathDistribution(Distribution): + def __init__(self, path: SimplePath): + """Construct a distribution. + + :param path: SimplePath indicating the metadata directory. + """ + self._path = path + + def read_text(self, filename): + with suppress( + FileNotFoundError, + IsADirectoryError, + KeyError, + NotADirectoryError, + PermissionError, + ): + return self._path.joinpath(filename).read_text(encoding='utf-8') + + read_text.__doc__ = Distribution.read_text.__doc__ + + def locate_file(self, path): + return self._path.parent / path + + @property + def _normalized_name(self): + """ + Performance optimization: where possible, resolve the + normalized name from the file system path. + """ + stem = os.path.basename(str(self._path)) + return self._name_from_stem(stem) or super()._normalized_name + + def _name_from_stem(self, stem): + name, ext = os.path.splitext(stem) + if ext not in ('.dist-info', '.egg-info'): + return + name, sep, rest = stem.partition('-') + return name + + +def distribution(distribution_name): + """Get the ``Distribution`` instance for the named package. + + :param distribution_name: The name of the distribution package as a string. + :return: A ``Distribution`` instance (or subclass thereof). + """ + return Distribution.from_name(distribution_name) + + +def distributions(**kwargs): + """Get all ``Distribution`` instances in the current environment. + + :return: An iterable of ``Distribution`` instances. + """ + return Distribution.discover(**kwargs) + + +def metadata(distribution_name) -> _meta.PackageMetadata: + """Get the metadata for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: A PackageMetadata containing the parsed metadata. + """ + return Distribution.from_name(distribution_name).metadata + + +def version(distribution_name): + """Get the version string for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: The version string for the package as defined in the package's + "Version" metadata key. + """ + return distribution(distribution_name).version + + +def entry_points(**params) -> Union[EntryPoints, SelectableGroups]: + """Return EntryPoint objects for all installed packages. + + Pass selection parameters (group or name) to filter the + result to entry points matching those properties (see + EntryPoints.select()). + + For compatibility, returns ``SelectableGroups`` object unless + selection parameters are supplied. In the future, this function + will return ``EntryPoints`` instead of ``SelectableGroups`` + even when no selection parameters are supplied. + + For maximum future compatibility, pass selection parameters + or invoke ``.select`` with parameters on the result. + + :return: EntryPoints or SelectableGroups for all installed packages. 
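For illustration, the module-level convenience helpers defined above in one sketch ("pip" is an arbitrary installed name):

from importlib_metadata import metadata, version

print(version("pip"))
print(metadata("pip")["Summary"])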
+ """ + norm_name = operator.attrgetter('_normalized_name') + unique = functools.partial(unique_everseen, key=norm_name) + eps = itertools.chain.from_iterable( + dist.entry_points for dist in unique(distributions()) + ) + return SelectableGroups.load(eps).select(**params) + + +def files(distribution_name): + """Return a list of files for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: List of files composing the distribution. + """ + return distribution(distribution_name).files + + +def requires(distribution_name): + """ + Return a list of requirements for the named package. + + :return: An iterator of requirements, suitable for + packaging.requirement.Requirement. + """ + return distribution(distribution_name).requires + + +def packages_distributions() -> Mapping[str, List[str]]: + """ + Return a mapping of top-level packages to their + distributions. + + >>> import collections.abc + >>> pkgs = packages_distributions() + >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) + True + """ + pkg_to_dist = collections.defaultdict(list) + for dist in distributions(): + for pkg in _top_level_declared(dist) or _top_level_inferred(dist): + pkg_to_dist[pkg].append(dist.metadata['Name']) + return dict(pkg_to_dist) + + +def _top_level_declared(dist): + return (dist.read_text('top_level.txt') or '').split() + + +def _top_level_inferred(dist): + return { + f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name + for f in always_iterable(dist.files) + if f.suffix == ".py" + } diff --git a/lib/importlib_metadata/_adapters.py b/lib/importlib_metadata/_adapters.py new file mode 100644 index 00000000..aa460d3e --- /dev/null +++ b/lib/importlib_metadata/_adapters.py @@ -0,0 +1,68 @@ +import re +import textwrap +import email.message + +from ._text import FoldedCase + + +class Message(email.message.Message): + multiple_use_keys = set( + map( + FoldedCase, + [ + 'Classifier', + 'Obsoletes-Dist', + 'Platform', + 'Project-URL', + 'Provides-Dist', + 'Provides-Extra', + 'Requires-Dist', + 'Requires-External', + 'Supported-Platform', + 'Dynamic', + ], + ) + ) + """ + Keys that may be indicated multiple times per PEP 566. + """ + + def __new__(cls, orig: email.message.Message): + res = super().__new__(cls) + vars(res).update(vars(orig)) + return res + + def __init__(self, *args, **kwargs): + self._headers = self._repair_headers() + + # suppress spurious error from mypy + def __iter__(self): + return super().__iter__() + + def _repair_headers(self): + def redent(value): + "Correct for RFC822 indentation" + if not value or '\n' not in value: + return value + return textwrap.dedent(' ' * 8 + value) + + headers = [(key, redent(value)) for key, value in vars(self)['_headers']] + if self._payload: + headers.append(('Description', self.get_payload())) + return headers + + @property + def json(self): + """ + Convert PackageMetadata to a JSON-compatible format + per PEP 0566. 
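For illustration, a sketch of packages_distributions() defined above (the "yaml" key appears only if PyYAML happens to be installed):

from importlib_metadata import packages_distributions

mapping = packages_distributions()
print(mapping.get("yaml"))  # e.g. ['PyYAML']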
+ """ + + def transform(key): + value = self.get_all(key) if key in self.multiple_use_keys else self[key] + if key == 'Keywords': + value = re.split(r'\s+', value) + tk = key.lower().replace('-', '_') + return tk, value + + return dict(map(transform, map(FoldedCase, self))) diff --git a/lib/importlib_metadata/_collections.py b/lib/importlib_metadata/_collections.py new file mode 100644 index 00000000..cf0954e1 --- /dev/null +++ b/lib/importlib_metadata/_collections.py @@ -0,0 +1,30 @@ +import collections + + +# from jaraco.collections 3.3 +class FreezableDefaultDict(collections.defaultdict): + """ + Often it is desirable to prevent the mutation of + a default dict after its initial construction, such + as to prevent mutation during iteration. + + >>> dd = FreezableDefaultDict(list) + >>> dd[0].append('1') + >>> dd.freeze() + >>> dd[1] + [] + >>> len(dd) + 1 + """ + + def __missing__(self, key): + return getattr(self, '_frozen', super().__missing__)(key) + + def freeze(self): + self._frozen = lambda key: self.default_factory() + + +class Pair(collections.namedtuple('Pair', 'name value')): + @classmethod + def parse(cls, text): + return cls(*map(str.strip, text.split("=", 1))) diff --git a/lib/importlib_metadata/_compat.py b/lib/importlib_metadata/_compat.py new file mode 100644 index 00000000..8fe4e4e3 --- /dev/null +++ b/lib/importlib_metadata/_compat.py @@ -0,0 +1,71 @@ +import sys +import platform + + +__all__ = ['install', 'NullFinder', 'Protocol'] + + +try: + from typing import Protocol +except ImportError: # pragma: no cover + from typing_extensions import Protocol # type: ignore + + +def install(cls): + """ + Class decorator for installation on sys.meta_path. + + Adds the backport DistributionFinder to sys.meta_path and + attempts to disable the finder functionality of the stdlib + DistributionFinder. + """ + sys.meta_path.append(cls()) + disable_stdlib_finder() + return cls + + +def disable_stdlib_finder(): + """ + Give the backport primacy for discovering path-based distributions + by monkey-patching the stdlib O_O. + + See #91 for more background for rationale on this sketchy + behavior. + """ + + def matches(finder): + return getattr( + finder, '__module__', None + ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions') + + for finder in filter(matches, sys.meta_path): # pragma: nocover + del finder.find_distributions + + +class NullFinder: + """ + A "Finder" (aka "MetaClassFinder") that never finds any modules, + but may find distributions. + """ + + @staticmethod + def find_spec(*args, **kwargs): + return None + + # In Python 2, the import system requires finders + # to have a find_module() method, but this usage + # is deprecated in Python 3 in favor of find_spec(). + # For the purposes of this finder (i.e. being present + # on sys.meta_path but having no other import + # system functionality), the two methods are identical. + find_module = find_spec + + +def pypy_partial(val): + """ + Adjust for variable stacklevel on partial under PyPy. + + Workaround for #327. + """ + is_pypy = platform.python_implementation() == 'PyPy' + return val + is_pypy diff --git a/lib/importlib_metadata/_functools.py b/lib/importlib_metadata/_functools.py new file mode 100644 index 00000000..71f66bd0 --- /dev/null +++ b/lib/importlib_metadata/_functools.py @@ -0,0 +1,104 @@ +import types +import functools + + +# from jaraco.functools 3.3 +def method_cache(method, cache_wrapper=None): + """ + Wrap lru_cache to support storing the cache data in the object instances. 
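As an aside, a sketch of the Pair helper that Sectioned uses to split "key = value" lines (note _collections is a private module, imported here purely for demonstration):

from importlib_metadata._collections import Pair

print(Pair.parse("console_scripts = pkg.cli:main"))
# Pair(name='console_scripts', value='pkg.cli:main')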
+ + Abstracts the common paradigm where the method explicitly saves an + underscore-prefixed protected property on first call and returns that + subsequently. + + >>> class MyClass: + ... calls = 0 + ... + ... @method_cache + ... def method(self, value): + ... self.calls += 1 + ... return value + + >>> a = MyClass() + >>> a.method(3) + 3 + >>> for x in range(75): + ... res = a.method(x) + >>> a.calls + 75 + + Note that the apparent behavior will be exactly like that of lru_cache + except that the cache is stored on each instance, so values in one + instance will not flush values from another, and when an instance is + deleted, so are the cached values for that instance. + + >>> b = MyClass() + >>> for x in range(35): + ... res = b.method(x) + >>> b.calls + 35 + >>> a.method(0) + 0 + >>> a.calls + 75 + + Note that if method had been decorated with ``functools.lru_cache()``, + a.calls would have been 76 (due to the cached value of 0 having been + flushed by the 'b' instance). + + Clear the cache with ``.cache_clear()`` + + >>> a.method.cache_clear() + + Same for a method that hasn't yet been called. + + >>> c = MyClass() + >>> c.method.cache_clear() + + Another cache wrapper may be supplied: + + >>> cache = functools.lru_cache(maxsize=2) + >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) + >>> a = MyClass() + >>> a.method2() + 3 + + Caution - do not subsequently wrap the method with another decorator, such + as ``@property``, which changes the semantics of the function. + + See also + http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ + for another implementation and additional justification. + """ + cache_wrapper = cache_wrapper or functools.lru_cache() + + def wrapper(self, *args, **kwargs): + # it's the first call, replace the method with a cached, bound method + bound_method = types.MethodType(method, self) + cached_method = cache_wrapper(bound_method) + setattr(self, method.__name__, cached_method) + return cached_method(*args, **kwargs) + + # Support cache clear even before cache has been created. + wrapper.cache_clear = lambda: None + + return wrapper + + +# From jaraco.functools 3.3 +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper diff --git a/lib/importlib_metadata/_itertools.py b/lib/importlib_metadata/_itertools.py new file mode 100644 index 00000000..d4ca9b91 --- /dev/null +++ b/lib/importlib_metadata/_itertools.py @@ -0,0 +1,73 @@ +from itertools import filterfalse + + +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." 
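For illustration, the recipe's documented behaviour as a runnable sketch (importing from the private _itertools module purely for demonstration):

from importlib_metadata._itertools import unique_everseen

print(list(unique_everseen("AAAABBBCCDAABBB")))         # ['A', 'B', 'C', 'D']
print(list(unique_everseen("ABBCcAD", key=str.lower)))  # ['A', 'B', 'C', 'D']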
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +# copied from more_itertools 8.8 +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) diff --git a/lib/importlib_metadata/_meta.py b/lib/importlib_metadata/_meta.py new file mode 100644 index 00000000..37ee43e6 --- /dev/null +++ b/lib/importlib_metadata/_meta.py @@ -0,0 +1,48 @@ +from ._compat import Protocol +from typing import Any, Dict, Iterator, List, TypeVar, Union + + +_T = TypeVar("_T") + + +class PackageMetadata(Protocol): + def __len__(self) -> int: + ... # pragma: no cover + + def __contains__(self, item: str) -> bool: + ... # pragma: no cover + + def __getitem__(self, key: str) -> str: + ... # pragma: no cover + + def __iter__(self) -> Iterator[str]: + ... # pragma: no cover + + def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]: + """ + Return all values associated with a possibly multi-valued key. + """ + + @property + def json(self) -> Dict[str, Union[str, List[str]]]: + """ + A JSON-compatible form of the metadata. + """ + + +class SimplePath(Protocol): + """ + A minimal subset of pathlib.Path required by PathDistribution. + """ + + def joinpath(self) -> 'SimplePath': + ... # pragma: no cover + + def __truediv__(self) -> 'SimplePath': + ... # pragma: no cover + + def parent(self) -> 'SimplePath': + ... # pragma: no cover + + def read_text(self) -> str: + ... # pragma: no cover diff --git a/lib/importlib_metadata/_text.py b/lib/importlib_metadata/_text.py new file mode 100644 index 00000000..c88cfbb2 --- /dev/null +++ b/lib/importlib_metadata/_text.py @@ -0,0 +1,99 @@ +import re + +from ._functools import method_cache + + +# from jaraco.text 3.5 +class FoldedCase(str): + """ + A case insensitive string class; behaves just like str + except compares equal when the only variation is case. 
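For illustration, always_iterable (defined above) as a runnable sketch, mirroring its own doctests (private-module import, for demonstration only):

from importlib_metadata._itertools import always_iterable

print(list(always_iterable(None)))       # []
print(list(always_iterable("foo")))      # ['foo']
print(list(always_iterable((1, 2, 3))))  # [1, 2, 3]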
+ + >>> s = FoldedCase('hello world') + + >>> s == 'Hello World' + True + + >>> 'Hello World' == s + True + + >>> s != 'Hello World' + False + + >>> s.index('O') + 4 + + >>> s.split('O') + ['hell', ' w', 'rld'] + + >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) + ['alpha', 'Beta', 'GAMMA'] + + Sequence membership is straightforward. + + >>> "Hello World" in [s] + True + >>> s in ["Hello World"] + True + + You may test for set inclusion, but candidate and elements + must both be folded. + + >>> FoldedCase("Hello World") in {s} + True + >>> s in {FoldedCase("Hello World")} + True + + String inclusion works as long as the FoldedCase object + is on the right. + + >>> "hello" in FoldedCase("Hello World") + True + + But not if the FoldedCase object is on the left: + + >>> FoldedCase('hello') in 'Hello World' + False + + In that case, use in_: + + >>> FoldedCase('hello').in_('Hello World') + True + + >>> FoldedCase('hello') > FoldedCase('Hello') + False + """ + + def __lt__(self, other): + return self.lower() < other.lower() + + def __gt__(self, other): + return self.lower() > other.lower() + + def __eq__(self, other): + return self.lower() == other.lower() + + def __ne__(self, other): + return self.lower() != other.lower() + + def __hash__(self): + return hash(self.lower()) + + def __contains__(self, other): + return super().lower().__contains__(other.lower()) + + def in_(self, other): + "Does self appear in other?" + return self in FoldedCase(other) + + # cache lower since it's likely to be called frequently. + @method_cache + def lower(self): + return super().lower() + + def index(self, sub): + return self.lower().index(sub.lower()) + + def split(self, splitter=' ', maxsplit=0): + pattern = re.compile(re.escape(splitter), re.I) + return pattern.split(self, maxsplit) diff --git a/lib/importlib_metadata/py.typed b/lib/importlib_metadata/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/lib/mako/__init__.py b/lib/mako/__init__.py index df2ae480..5ae55011 100644 --- a/lib/mako/__init__.py +++ b/lib/mako/__init__.py @@ -1,8 +1,8 @@ # mako/__init__.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -__version__ = "1.1.6" +__version__ = "1.2.0" diff --git a/lib/mako/_ast_util.py b/lib/mako/_ast_util.py index bdcdbf69..b8615335 100644 --- a/lib/mako/_ast_util.py +++ b/lib/mako/_ast_util.py @@ -1,5 +1,5 @@ # mako/_ast_util.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -47,7 +47,6 @@ from _ast import Sub from _ast import UAdd from _ast import USub -from mako.compat import arg_stringname BOOLOP_SYMBOLS = {And: "and", Or: "or"} @@ -94,9 +93,7 @@ def parse(expr, filename="", mode="exec"): def iter_fields(node): """Iterate over all fields of a node, only yielding existing fields.""" - # CPython 2.5 compat - if not hasattr(node, "_fields") or not node._fields: - return + for field in node._fields: try: yield field, getattr(node, field) @@ -104,7 +101,7 @@ def iter_fields(node): pass -class NodeVisitor(object): +class NodeVisitor: """ Walks the abstract syntax tree and call visitor functions for every node @@ -266,10 +263,10 @@ class SourceGenerator(NodeVisitor): self.visit(default) if node.vararg is 
not None: write_comma() - self.write("*" + arg_stringname(node.vararg)) + self.write("*" + node.vararg.arg) if node.kwarg is not None: write_comma() - self.write("**" + arg_stringname(node.kwarg)) + self.write("**" + node.kwarg.arg) def decorators(self, node): for decorator in node.decorator_list: diff --git a/lib/mako/ast.py b/lib/mako/ast.py index cfae2806..f879e8b4 100644 --- a/lib/mako/ast.py +++ b/lib/mako/ast.py @@ -1,5 +1,5 @@ # mako/ast.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,12 +9,11 @@ code, as well as generating Python from AST nodes""" import re -from mako import compat from mako import exceptions from mako import pyparser -class PythonCode(object): +class PythonCode: """represents information about a string containing Python code""" @@ -39,7 +38,7 @@ class PythonCode(object): # - AST is less likely to break with version changes # (for example, the behavior of co_names changed a little bit # in python version 2.5) - if isinstance(code, compat.string_types): + if isinstance(code, str): expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs) else: expr = code @@ -48,7 +47,7 @@ class PythonCode(object): f.visit(expr) -class ArgumentList(object): +class ArgumentList: """parses a fragment of code as a comma-separated list of expressions""" @@ -57,7 +56,7 @@ class ArgumentList(object): self.args = [] self.declared_identifiers = set() self.undeclared_identifiers = set() - if isinstance(code, compat.string_types): + if isinstance(code, str): if re.match(r"\S", code) and not re.match(r",\s*$", code): # if theres text and no trailing comma, insure its parsed # as a tuple by adding a trailing comma @@ -88,7 +87,7 @@ class PythonFragment(PythonCode): if not m: raise exceptions.CompileException( "Fragment '%s' is not a partial control statement" % code, - **exception_kwargs + **exception_kwargs, ) if m.group(3): code = code[: m.start(3)] @@ -97,7 +96,7 @@ class PythonFragment(PythonCode): code = code + "pass" elif keyword == "try": code = code + "pass\nexcept:pass" - elif keyword == "elif" or keyword == "else": + elif keyword in ["elif", "else"]: code = "if False:pass\n" + code + "pass" elif keyword == "except": code = "try:pass\n" + code + "pass" @@ -106,12 +105,12 @@ class PythonFragment(PythonCode): else: raise exceptions.CompileException( "Unsupported control keyword: '%s'" % keyword, - **exception_kwargs + **exception_kwargs, ) - super(PythonFragment, self).__init__(code, **exception_kwargs) + super().__init__(code, **exception_kwargs) -class FunctionDecl(object): +class FunctionDecl: """function declaration""" @@ -124,13 +123,13 @@ class FunctionDecl(object): if not hasattr(self, "funcname"): raise exceptions.CompileException( "Code '%s' is not a function declaration" % code, - **exception_kwargs + **exception_kwargs, ) if not allow_kwargs and self.kwargs: raise exceptions.CompileException( "'**%s' keyword argument not allowed here" % self.kwargnames[-1], - **exception_kwargs + **exception_kwargs, ) def get_argument_expressions(self, as_call=False): @@ -200,6 +199,4 @@ class FunctionArgs(FunctionDecl): """the argument portion of a function declaration""" def __init__(self, code, **kwargs): - super(FunctionArgs, self).__init__( - "def ANON(%s):pass" % code, **kwargs - ) + super().__init__("def ANON(%s):pass" % code, **kwargs) diff --git a/lib/mako/cache.py 
b/lib/mako/cache.py index 26aa93ee..d77be271 100644 --- a/lib/mako/cache.py +++ b/lib/mako/cache.py @@ -1,10 +1,9 @@ # mako/cache.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from mako import compat from mako import util _cache_plugins = util.PluginLoader("mako.cache") @@ -13,7 +12,7 @@ register_plugin = _cache_plugins.register register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl") -class Cache(object): +class Cache: """Represents a data content cache made available to the module space of a specific :class:`.Template` object. @@ -66,7 +65,7 @@ class Cache(object): def __init__(self, template, *args): # check for a stale template calling the # constructor - if isinstance(template, compat.string_types) and args: + if isinstance(template, str) and args: return self.template = template self.id = template.module.__name__ @@ -181,7 +180,7 @@ class Cache(object): return tmpl_kw -class CacheImpl(object): +class CacheImpl: """Provide a cache implementation for use by :class:`.Cache`.""" diff --git a/lib/mako/cmd.py b/lib/mako/cmd.py index c0f2c754..7592fb27 100755 --- a/lib/mako/cmd.py +++ b/lib/mako/cmd.py @@ -1,10 +1,9 @@ # mako/cmd.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from argparse import ArgumentParser -import io from os.path import dirname from os.path import isfile import sys @@ -85,16 +84,14 @@ def cmdline(argv=None): except: _exit() - kw = dict([varsplit(var) for var in options.var]) + kw = dict(varsplit(var) for var in options.var) try: rendered = template.render(**kw) except: _exit() else: if output_file: - io.open(output_file, "wt", encoding=output_encoding).write( - rendered - ) + open(output_file, "wt", encoding=output_encoding).write(rendered) else: sys.stdout.write(rendered) diff --git a/lib/mako/codegen.py b/lib/mako/codegen.py index a9ae55b8..c897f0ff 100644 --- a/lib/mako/codegen.py +++ b/lib/mako/codegen.py @@ -1,5 +1,5 @@ # mako/codegen.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -12,7 +12,6 @@ import re import time from mako import ast -from mako import compat from mako import exceptions from mako import filters from mako import parsetree @@ -25,8 +24,8 @@ MAGIC_NUMBER = 10 # names which are hardwired into the # template and are not accessed via the # context itself -TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"]) -RESERVED_NAMES = set(["context", "loop"]).union(TOPLEVEL_DECLARED) +TOPLEVEL_DECLARED = {"UNDEFINED", "STOP_RENDERING"} +RESERVED_NAMES = {"context", "loop"}.union(TOPLEVEL_DECLARED) def compile( # noqa @@ -39,20 +38,12 @@ def compile( # noqa future_imports=None, source_encoding=None, generate_magic_comment=True, - disable_unicode=False, strict_undefined=False, enable_loop=True, reserved_names=frozenset(), ): """Generate module source code given a parsetree node, - uri, and optional source filename""" - - # if on Py2K, push the "source_encoding" string to be - # a bytestring itself, as we will be embedding it into - # the generated source and we don't 
want to coerce the - # result into a unicode object, in "disable_unicode" mode - if not compat.py3k and isinstance(source_encoding, compat.text_type): - source_encoding = source_encoding.encode(source_encoding) + uri, and optional source filename""" buf = util.FastEncodingBuffer() @@ -68,7 +59,6 @@ def compile( # noqa future_imports, source_encoding, generate_magic_comment, - disable_unicode, strict_undefined, enable_loop, reserved_names, @@ -78,7 +68,7 @@ def compile( # noqa return buf.getvalue() -class _CompileContext(object): +class _CompileContext: def __init__( self, uri, @@ -89,7 +79,6 @@ class _CompileContext(object): future_imports, source_encoding, generate_magic_comment, - disable_unicode, strict_undefined, enable_loop, reserved_names, @@ -102,16 +91,15 @@ class _CompileContext(object): self.future_imports = future_imports self.source_encoding = source_encoding self.generate_magic_comment = generate_magic_comment - self.disable_unicode = disable_unicode self.strict_undefined = strict_undefined self.enable_loop = enable_loop self.reserved_names = reserved_names -class _GenerateRenderMethod(object): +class _GenerateRenderMethod: """A template visitor object which generates the - full module source for a template. + full module source for a template. """ @@ -196,7 +184,7 @@ class _GenerateRenderMethod(object): self.compiler.pagetag = None - class FindTopLevel(object): + class FindTopLevel: def visitInheritTag(s, node): inherit.append(node) @@ -392,7 +380,7 @@ class _GenerateRenderMethod(object): identifiers = self.compiler.identifiers.branch(node) self.in_def = True - class NSDefVisitor(object): + class NSDefVisitor: def visitDefTag(s, node): s.visitDefOrBase(node) @@ -404,7 +392,7 @@ class _GenerateRenderMethod(object): raise exceptions.CompileException( "Can't put anonymous blocks inside " "<%namespace>", - **node.exception_kwargs + **node.exception_kwargs, ) self.write_inline_def(node, identifiers, nested=False) export.append(node.funcname) @@ -481,7 +469,7 @@ class _GenerateRenderMethod(object): """ # collection of all defs available to us in this scope - comp_idents = dict([(c.funcname, c) for c in identifiers.defs]) + comp_idents = {c.funcname: c for c in identifiers.defs} to_write = set() # write "context.get()" for all variables we are going to @@ -714,7 +702,7 @@ class _GenerateRenderMethod(object): toplevel=False, ): """write a post-function decorator to replace a rendering - callable with a cached version of itself.""" + callable with a cached version of itself.""" self.printer.writeline("__M_%s = %s" % (name, name)) cachekey = node_or_pagetag.parsed_attributes.get( @@ -794,8 +782,6 @@ class _GenerateRenderMethod(object): def locate_encode(name): if re.match(r"decode\..+", name): return "filters." 
+ name - elif self.compiler.disable_unicode: - return filters.NON_UNICODE_ESCAPES.get(name, name) else: return filters.DEFAULT_ESCAPES.get(name, name) @@ -859,11 +845,11 @@ class _GenerateRenderMethod(object): # and end control lines, and # 3) any control line with no content other than comments if not children or ( - compat.all( + all( isinstance(c, (parsetree.Comment, parsetree.ControlLine)) for c in children ) - and compat.all( + and all( (node.is_ternary(c.keyword) or c.isend) for c in children if isinstance(c, parsetree.ControlLine) @@ -969,7 +955,7 @@ class _GenerateRenderMethod(object): self.identifier_stack.append(body_identifiers) - class DefVisitor(object): + class DefVisitor: def visitDefTag(s, node): s.visitDefOrBase(node) @@ -1025,7 +1011,7 @@ class _GenerateRenderMethod(object): ) -class _Identifiers(object): +class _Identifiers: """tracks the status of identifier names as template code is rendered.""" @@ -1098,7 +1084,7 @@ class _Identifiers(object): def branch(self, node, **kwargs): """create a new Identifiers for a new Node, with - this Identifiers as the parent.""" + this Identifiers as the parent.""" return _Identifiers(self.compiler, node, self, **kwargs) @@ -1123,7 +1109,7 @@ class _Identifiers(object): def check_declared(self, node): """update the state of this Identifiers with the undeclared - and declared identifiers of the given node.""" + and declared identifiers of the given node.""" for ident in node.undeclared_identifiers(): if ident != "context" and ident not in self.declared.union( @@ -1170,7 +1156,7 @@ class _Identifiers(object): raise exceptions.CompileException( "%%def or %%block named '%s' already " "exists in this template." % node.funcname, - **node.exception_kwargs + **node.exception_kwargs, ) def visitDefTag(self, node): @@ -1200,7 +1186,7 @@ class _Identifiers(object): raise exceptions.CompileException( "Named block '%s' not allowed inside of def '%s'" % (node.name, self.node.name), - **node.exception_kwargs + **node.exception_kwargs, ) elif isinstance( self.node, (parsetree.CallTag, parsetree.CallNamespaceTag) @@ -1208,7 +1194,7 @@ class _Identifiers(object): raise exceptions.CompileException( "Named block '%s' not allowed inside of <%%call> tag" % (node.name,), - **node.exception_kwargs + **node.exception_kwargs, ) for ident in node.undeclared_identifiers(): @@ -1293,7 +1279,7 @@ def mangle_mako_loop(node, printer): return text -class LoopVariable(object): +class LoopVariable: """A node visitor which looks for the name 'loop' within undeclared identifiers.""" diff --git a/lib/mako/compat.py b/lib/mako/compat.py index 06bb8d99..68bc03b1 100644 --- a/lib/mako/compat.py +++ b/lib/mako/compat.py @@ -1,19 +1,17 @@ # mako/compat.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php import collections +from importlib import util import inspect import sys -py3k = sys.version_info >= (3, 0) -py2k = sys.version_info < (3,) -py27 = sys.version_info >= (2, 7) -jython = sys.platform.startswith("java") win32 = sys.platform.startswith("win") pypy = hasattr(sys, "pypy_version_info") +py38 = sys.version_info >= (3, 8) ArgSpec = collections.namedtuple( "ArgSpec", ["args", "varargs", "keywords", "defaults"] @@ -26,15 +24,15 @@ def inspect_getargspec(func): if inspect.ismethod(func): func = func.__func__ if not inspect.isfunction(func): - raise TypeError("{!r} is not a Python 
function".format(func)) + raise TypeError(f"{func!r} is not a Python function") co = func.__code__ if not inspect.iscode(co): - raise TypeError("{!r} is not a code object".format(co)) + raise TypeError(f"{co!r} is not a code object") nargs = co.co_argcount names = co.co_varnames - nkwargs = co.co_kwonlyargcount if py3k else 0 + nkwargs = co.co_kwonlyargcount args = list(names[:nargs]) nargs += nkwargs @@ -49,129 +47,30 @@ def inspect_getargspec(func): return ArgSpec(args, varargs, varkw, func.__defaults__) -if py3k: - from io import StringIO - import builtins as compat_builtins - from urllib.parse import quote_plus, unquote_plus - from html.entities import codepoint2name, name2codepoint - - string_types = (str,) - binary_type = bytes - text_type = str - - from io import BytesIO as byte_buffer - - def u(s): - return s - - def b(s): - return s.encode("latin-1") - - def octal(lit): - return eval("0o" + lit) - - -else: - import __builtin__ as compat_builtins # noqa - - try: - from cStringIO import StringIO - except: - from StringIO import StringIO - - byte_buffer = StringIO - - from urllib import quote_plus, unquote_plus # noqa - from htmlentitydefs import codepoint2name, name2codepoint # noqa - - string_types = (basestring,) # noqa - binary_type = str - text_type = unicode # noqa - - def u(s): - return unicode(s, "utf-8") # noqa - - def b(s): - return s - - def octal(lit): - return eval("0" + lit) - - -if py3k: - from importlib import machinery, util - - if hasattr(util, 'module_from_spec'): - # Python 3.5+ - def load_module(module_id, path): - spec = util.spec_from_file_location(module_id, path) - module = util.module_from_spec(spec) - spec.loader.exec_module(module) - return module - else: - def load_module(module_id, path): - module = machinery.SourceFileLoader(module_id, path).load_module() - del sys.modules[module_id] - return module - -else: - import imp - - def load_module(module_id, path): - fp = open(path, "rb") - try: - module = imp.load_source(module_id, path, fp) - del sys.modules[module_id] - return module - finally: - fp.close() - - -if py3k: - - def reraise(tp, value, tb=None, cause=None): - if cause is not None: - value.__cause__ = cause - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - -else: - exec( - "def reraise(tp, value, tb=None, cause=None):\n" - " raise tp, value, tb\n" - ) +def load_module(module_id, path): + spec = util.spec_from_file_location(module_id, path) + module = util.module_from_spec(spec) + spec.loader.exec_module(module) + return module def exception_as(): return sys.exc_info()[1] -all = all # noqa - - def exception_name(exc): return exc.__class__.__name__ -################################################ -# cross-compatible metaclass implementation -# Copyright (c) 2010-2012 Benjamin Peterson -def with_metaclass(meta, base=object): - """Create a base class with a metaclass.""" - return meta("%sBase" % meta.__name__, (base,), {}) +if py38: + from importlib import metadata as importlib_metadata +else: + import importlib_metadata # noqa -################################################ - - -def arg_stringname(func_arg): - """Gets the string name of a kwarg or vararg - In Python3.4 a function's args are - of _ast.arg type not _ast.name - """ - if hasattr(func_arg, "arg"): - return func_arg.arg +def importlib_metadata_get(group): + ep = importlib_metadata.entry_points() + if hasattr(ep, "select"): + return ep.select(group=group) else: - return str(func_arg) + return ep.get(group, ()) diff --git a/lib/mako/exceptions.py 
b/lib/mako/exceptions.py index ea7b20db..a0a5feca 100644 --- a/lib/mako/exceptions.py +++ b/lib/mako/exceptions.py @@ -1,5 +1,5 @@ # mako/exceptions.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -68,7 +68,7 @@ class TopLevelLookupException(TemplateLookupException): pass -class RichTraceback(object): +class RichTraceback: """Pull the current exception from the ``sys`` traceback and extracts Mako-specific template information. @@ -106,7 +106,7 @@ class RichTraceback(object): def _init_message(self): """Find a unicode representation of self.error""" try: - self.message = compat.text_type(self.error) + self.message = str(self.error) except UnicodeError: try: self.message = str(self.error) @@ -114,8 +114,8 @@ class RichTraceback(object): # Fallback to args as neither unicode nor # str(Exception(u'\xe6')) work in Python < 2.6 self.message = self.error.args[0] - if not isinstance(self.message, compat.text_type): - self.message = compat.text_type(self.message, "ascii", "replace") + if not isinstance(self.message, str): + self.message = str(self.message, "ascii", "replace") def _get_reformatted_records(self, records): for rec in records: @@ -139,8 +139,7 @@ class RichTraceback(object): @property def reverse_traceback(self): - """Return the same data as traceback, except in reverse order. - """ + """Return the same data as traceback, except in reverse order.""" return list(self._get_reformatted_records(self.reverse_records)) @@ -170,17 +169,6 @@ class RichTraceback(object): ) except KeyError: # A normal .py file (not a Template) - if not compat.py3k: - try: - fp = open(filename, "rb") - encoding = util.parse_encoding(fp) - fp.close() - except IOError: - encoding = None - if encoding: - line = line.decode(encoding) - else: - line = line.decode("ascii", "replace") new_trcback.append( ( filename, @@ -236,13 +224,12 @@ class RichTraceback(object): if new_trcback: try: # A normal .py file (not a Template) - fp = open(new_trcback[-1][0], "rb") - encoding = util.parse_encoding(fp) - if compat.py3k and not encoding: - encoding = "utf-8" - fp.seek(0) - self.source = fp.read() - fp.close() + with open(new_trcback[-1][0], "rb") as fp: + encoding = util.parse_encoding(fp) + if not encoding: + encoding = "utf-8" + fp.seek(0) + self.source = fp.read() if encoding: self.source = self.source.decode(encoding) except IOError: diff --git a/lib/mako/ext/autohandler.py b/lib/mako/ext/autohandler.py index 8b1324ef..e8fdac89 100644 --- a/lib/mako/ext/autohandler.py +++ b/lib/mako/ext/autohandler.py @@ -1,5 +1,5 @@ # ext/autohandler.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/ext/babelplugin.py b/lib/mako/ext/babelplugin.py index 76bbc5b0..f015ec25 100644 --- a/lib/mako/ext/babelplugin.py +++ b/lib/mako/ext/babelplugin.py @@ -1,10 +1,10 @@ # ext/babelplugin.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -"""gettext message extraction via Babel: http://babel.edgewall.org/""" +"""gettext message extraction via Babel: 
https://pypi.org/project/Babel/""" from babel.messages.extract import extract_python from mako.ext.extract import MessageExtractor @@ -15,12 +15,12 @@ class BabelMakoExtractor(MessageExtractor): self.keywords = keywords self.options = options self.config = { - "comment-tags": u" ".join(comment_tags), + "comment-tags": " ".join(comment_tags), "encoding": options.get( "input_encoding", options.get("encoding", None) ), } - super(BabelMakoExtractor, self).__init__() + super().__init__() def __call__(self, fileobj): return self.process_file(fileobj) @@ -54,5 +54,4 @@ def extract(fileobj, keywords, comment_tags, options): :rtype: ``iterator`` """ extractor = BabelMakoExtractor(keywords, comment_tags, options) - for message in extractor(fileobj): - yield message + yield from extractor(fileobj) diff --git a/lib/mako/ext/beaker_cache.py b/lib/mako/ext/beaker_cache.py index f65ce43a..a40b09cf 100644 --- a/lib/mako/ext/beaker_cache.py +++ b/lib/mako/ext/beaker_cache.py @@ -1,5 +1,5 @@ # ext/beaker_cache.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -40,7 +40,7 @@ class BeakerCacheImpl(CacheImpl): _beaker_cache = cache.template.cache_args["manager"] else: _beaker_cache = beaker_cache.CacheManager() - super(BeakerCacheImpl, self).__init__(cache) + super().__init__(cache) def _get_cache(self, **kw): expiretime = kw.pop("timeout", None) diff --git a/lib/mako/ext/extract.py b/lib/mako/ext/extract.py index ad2348a5..74d067d8 100644 --- a/lib/mako/ext/extract.py +++ b/lib/mako/ext/extract.py @@ -1,23 +1,25 @@ # ext/extract.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php +from io import BytesIO +from io import StringIO import re -from mako import compat from mako import lexer from mako import parsetree -class MessageExtractor(object): +class MessageExtractor: + use_bytes = True + def process_file(self, fileobj): template_node = lexer.Lexer( fileobj.read(), input_encoding=self.config["encoding"] ).parse() - for extracted in self.extract_nodes(template_node.get_children()): - yield extracted + yield from self.extract_nodes(template_node.get_children()) def extract_nodes(self, nodes): translator_comments = [] @@ -90,7 +92,7 @@ class MessageExtractor(object): comment[1] for comment in translator_comments ] - if isinstance(code, compat.text_type): + if isinstance(code, str) and self.use_bytes: code = code.encode(input_encoding, "backslashreplace") used_translator_comments = False @@ -99,7 +101,10 @@ class MessageExtractor(object): # input string of the input is non-ascii) # Also, because we added it, we have to subtract one from # node.lineno - code = compat.byte_buffer(compat.b("\n") + code) + if self.use_bytes: + code = BytesIO(b"\n" + code) + else: + code = StringIO("\n" + code) for message in self.process_python( code, node.lineno - 1, translator_strings @@ -112,8 +117,7 @@ class MessageExtractor(object): in_translator_comments = False if child_nodes: - for extracted in self.extract_nodes(child_nodes): - yield extracted + yield from self.extract_nodes(child_nodes) @staticmethod def _split_comment(lineno, comment): diff --git a/lib/mako/ext/linguaplugin.py b/lib/mako/ext/linguaplugin.py index c40fa748..4cce6262 100644 --- 
a/lib/mako/ext/linguaplugin.py +++ b/lib/mako/ext/linguaplugin.py @@ -1,23 +1,23 @@ # ext/linguaplugin.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php +import contextlib import io from lingua.extractors import Extractor from lingua.extractors import get_extractor from lingua.extractors import Message -from mako import compat from mako.ext.extract import MessageExtractor class LinguaMakoExtractor(Extractor, MessageExtractor): - """Mako templates""" + use_bytes = False extensions = [".mako"] default_config = {"encoding": "utf-8", "comment-tags": ""} @@ -26,29 +26,21 @@ class LinguaMakoExtractor(Extractor, MessageExtractor): self.filename = filename self.python_extractor = get_extractor("x.py") if fileobj is None: - fileobj = open(filename, "rb") - must_close = True + ctx = open(filename, "r") else: - must_close = False - try: - for message in self.process_file(fileobj): - yield message - finally: - if must_close: - fileobj.close() + ctx = contextlib.nullcontext(fileobj) + with ctx as file_: + yield from self.process_file(file_) def process_python(self, code, code_lineno, translator_strings): source = code.getvalue().strip() - if source.endswith(compat.b(":")): - if source in ( - compat.b("try:"), - compat.b("else:"), - ) or source.startswith(compat.b("except")): - source = compat.b("") # Ignore try/except and else - elif source.startswith(compat.b("elif")): + if source.endswith(":"): + if source in ("try:", "else:") or source.startswith("except"): + source = "" # Ignore try/except and else + elif source.startswith("elif"): source = source[2:] # Replace "elif" with "if" - source += compat.b("pass") - code = io.BytesIO(source) + source += "pass" + code = io.StringIO(source) for msg in self.python_extractor( self.filename, self.options, code, code_lineno - 1 ): @@ -58,7 +50,7 @@ class LinguaMakoExtractor(Extractor, MessageExtractor): msg.msgid, msg.msgid_plural, msg.flags, - compat.u(" ").join(translator_strings + [msg.comment]), + " ".join(translator_strings + [msg.comment]), msg.tcomment, msg.location, ) diff --git a/lib/mako/ext/preprocessors.py b/lib/mako/ext/preprocessors.py index 9cc06214..6855eeb4 100644 --- a/lib/mako/ext/preprocessors.py +++ b/lib/mako/ext/preprocessors.py @@ -1,5 +1,5 @@ # ext/preprocessors.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php diff --git a/lib/mako/ext/pygmentplugin.py b/lib/mako/ext/pygmentplugin.py index 943a67a4..38d6a71b 100644 --- a/lib/mako/ext/pygmentplugin.py +++ b/lib/mako/ext/pygmentplugin.py @@ -1,5 +1,5 @@ # ext/pygmentplugin.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -25,8 +25,6 @@ from pygments.token import Other from pygments.token import String from pygments.token import Text -from mako import compat - class MakoLexer(RegexLexer): name = "Mako" @@ -108,7 +106,7 @@ class MakoHtmlLexer(DelegatingLexer): aliases = ["html+mako"] def __init__(self, **options): - super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options) + super().__init__(HtmlLexer, MakoLexer, 
**options)
 
 
 class MakoXmlLexer(DelegatingLexer):
@@ -116,7 +114,7 @@ class MakoXmlLexer(DelegatingLexer):
     aliases = ["xml+mako"]
 
     def __init__(self, **options):
-        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options)
+        super().__init__(XmlLexer, MakoLexer, **options)
 
 
 class MakoJavascriptLexer(DelegatingLexer):
@@ -124,9 +122,7 @@ class MakoJavascriptLexer(DelegatingLexer):
     aliases = ["js+mako", "javascript+mako"]
 
     def __init__(self, **options):
-        super(MakoJavascriptLexer, self).__init__(
-            JavascriptLexer, MakoLexer, **options
-        )
+        super().__init__(JavascriptLexer, MakoLexer, **options)
 
 
 class MakoCssLexer(DelegatingLexer):
@@ -134,7 +130,7 @@ class MakoCssLexer(DelegatingLexer):
     aliases = ["css+mako"]
 
     def __init__(self, **options):
-        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options)
+        super().__init__(CssLexer, MakoLexer, **options)
 
 
 pygments_html_formatter = HtmlFormatter(
@@ -144,10 +140,7 @@ pygments_html_formatter = HtmlFormatter(
 
 def syntax_highlight(filename="", language=None):
     mako_lexer = MakoLexer()
-    if compat.py3k:
-        python_lexer = Python3Lexer()
-    else:
-        python_lexer = PythonLexer()
+    python_lexer = Python3Lexer()
     if filename.startswith("memory:") or language == "mako":
         return lambda string: highlight(
             string, mako_lexer, pygments_html_formatter
diff --git a/lib/mako/ext/turbogears.py b/lib/mako/ext/turbogears.py
index 722a6b4b..413d9f74 100644
--- a/lib/mako/ext/turbogears.py
+++ b/lib/mako/ext/turbogears.py
@@ -1,5 +1,5 @@
 # ext/turbogears.py
-# Copyright 2006-2020 the Mako authors and contributors
+# Copyright 2006-2021 the Mako authors and contributors
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,7 +9,7 @@ from mako.lookup import TemplateLookup
 from mako.template import Template
 
 
-class TGPlugin(object):
+class TGPlugin:
 
     """TurboGears compatible Template Plugin."""
 
@@ -51,7 +51,7 @@ class TGPlugin(object):
     def render(
         self, info, format="html", fragment=False, template=None  # noqa
     ):
-        if isinstance(template, compat.string_types):
+        if isinstance(template, str):
             template = self.load_template(template)
 
         # Load extra vars func if provided
diff --git a/lib/mako/filters.py b/lib/mako/filters.py
index 0ae33ff4..26edd8ee 100644
--- a/lib/mako/filters.py
+++ b/lib/mako/filters.py
@@ -1,18 +1,19 @@
 # mako/filters.py
-# Copyright 2006-2020 the Mako authors and contributors
+# Copyright 2006-2021 the Mako authors and contributors
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 
 import codecs
+from html.entities import codepoint2name
+from html.entities import name2codepoint
 import re
+from urllib.parse import quote_plus
 
-from mako import compat
-from mako.compat import codepoint2name
-from mako.compat import name2codepoint
-from mako.compat import quote_plus
-from mako.compat import unquote_plus
+
+import markupsafe
+
+html_escape = markupsafe.escape
 
 xml_escapes = {
     "&": "&amp;",
@@ -22,27 +23,6 @@ xml_escapes = {
     "'": "&#39;",  # also &apos; in html-only
 }
 
-# XXX: &quot; is valid in HTML and XML
-#      &apos; is not valid HTML, but is valid XML
-
-
-def legacy_html_escape(s):
-    """legacy HTML escape for non-unicode mode."""
-    s = s.replace("&", "&amp;")
-    s = s.replace(">", "&gt;")
-    s = s.replace("<", "&lt;")
-    s = s.replace('"', "&#34;")
-    s = s.replace("'", "&#39;")
-    return s
-
-
-try:
-    import markupsafe
-
-    html_escape = markupsafe.escape
-except ImportError:
-    html_escape = legacy_html_escape
-
 
 def xml_escape(string):
     return 
re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
@@ -54,31 +34,19 @@ def url_escape(string):
     return quote_plus(string)
 
 
-def legacy_url_escape(string):
-    # convert into a list of octets
-    return quote_plus(string)
-
-
-def url_unescape(string):
-    text = unquote_plus(string)
-    if not is_ascii_str(text):
-        text = text.decode("utf8")
-    return text
-
-
 def trim(string):
     return string.strip()
 
 
-class Decode(object):
+class Decode:
     def __getattr__(self, key):
         def decode(x):
-            if isinstance(x, compat.text_type):
+            if isinstance(x, str):
                 return x
-            elif not isinstance(x, compat.binary_type):
+            elif not isinstance(x, bytes):
                 return decode(str(x))
             else:
-                return compat.text_type(x, encoding=key)
+                return str(x, encoding=key)
 
         return decode
 
@@ -86,24 +54,11 @@ def trim(string):
 decode = Decode()
 
 
-_ASCII_re = re.compile(r"\A[\x00-\x7f]*\Z")
-
-
-def is_ascii_str(text):
-    return isinstance(text, str) and _ASCII_re.match(text)
-
-
-################################################################
-
-
-class XMLEntityEscaper(object):
+class XMLEntityEscaper:
     def __init__(self, codepoint2name, name2codepoint):
-        self.codepoint2entity = dict(
-            [
-                (c, compat.text_type("&%s;" % n))
-                for c, n in codepoint2name.items()
-            ]
-        )
+        self.codepoint2entity = {
+            c: str("&%s;" % n) for c, n in codepoint2name.items()
+        }
         self.name2codepoint = name2codepoint
 
     def escape_entities(self, text):
@@ -111,7 +66,7 @@ class XMLEntityEscaper(object):
 
         Only characters corresponding to a named entity are replaced.
         """
-        return compat.text_type(text).translate(self.codepoint2entity)
+        return str(text).translate(self.codepoint2entity)
 
     def __escape(self, m):
         codepoint = ord(m.group())
@@ -131,9 +86,7 @@ class XMLEntityEscaper(object):
 
         The return value is guaranteed to be ASCII.
         """
-        return self.__escapable.sub(
-            self.__escape, compat.text_type(text)
-        ).encode("ascii")
+        return self.__escapable.sub(self.__escape, str(text)).encode("ascii")
 
     # XXX: This regexp will not match all valid XML entity names__.
     # (It punts on details involving involving CombiningChars and Extenders.)
@@ -183,37 +136,28 @@ def htmlentityreplace_errors(ex):
     characters with HTML entities, or, if no HTML entity exists for
     the character, XML character references::
 
-        >>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
+        >>> 'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
        'The cost was &euro;12.' 
""" if isinstance(ex, UnicodeEncodeError): # Handle encoding errors bad_text = ex.object[ex.start : ex.end] text = _html_entities_escaper.escape(bad_text) - return (compat.text_type(text), ex.end) + return (str(text), ex.end) raise ex codecs.register_error("htmlentityreplace", htmlentityreplace_errors) -# TODO: options to make this dynamic per-compilation will be added in a later -# release DEFAULT_ESCAPES = { "x": "filters.xml_escape", "h": "filters.html_escape", "u": "filters.url_escape", "trim": "filters.trim", "entity": "filters.html_entities_escape", - "unicode": "unicode", + "unicode": "str", "decode": "decode", "str": "str", "n": "n", } - -if compat.py3k: - DEFAULT_ESCAPES.update({"unicode": "str"}) - -NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy() -NON_UNICODE_ESCAPES["h"] = "filters.legacy_html_escape" -NON_UNICODE_ESCAPES["u"] = "filters.legacy_url_escape" diff --git a/lib/mako/lexer.py b/lib/mako/lexer.py index bbf0c3a5..527c4b51 100644 --- a/lib/mako/lexer.py +++ b/lib/mako/lexer.py @@ -1,5 +1,5 @@ # mako/lexer.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,7 +9,6 @@ import codecs import re -from mako import compat from mako import exceptions from mako import parsetree from mako.pygen import adjust_whitespace @@ -17,14 +16,9 @@ from mako.pygen import adjust_whitespace _regexp_cache = {} -class Lexer(object): +class Lexer: def __init__( - self, - text, - filename=None, - disable_unicode=False, - input_encoding=None, - preprocessor=None, + self, text, filename=None, input_encoding=None, preprocessor=None ): self.text = text self.filename = filename @@ -36,14 +30,8 @@ class Lexer(object): self.tag = [] self.control_line = [] self.ternary_stack = [] - self.disable_unicode = disable_unicode self.encoding = input_encoding - if compat.py3k and disable_unicode: - raise exceptions.UnsupportedError( - "Mako for Python 3 does not " "support disabling Unicode" - ) - if preprocessor is None: self.preprocessor = [] elif not hasattr(preprocessor, "__iter__"): @@ -66,10 +54,7 @@ class Lexer(object): try: reg = _regexp_cache[(regexp, flags)] except KeyError: - if flags: - reg = re.compile(regexp, flags) - else: - reg = re.compile(regexp) + reg = re.compile(regexp, flags) if flags else re.compile(regexp) _regexp_cache[(regexp, flags)] = reg return self.match_reg(reg) @@ -87,10 +72,7 @@ class Lexer(object): match = reg.match(self.text, self.match_position) if match: (start, end) = match.span() - if end == start: - self.match_position = end + 1 - else: - self.match_position = end + self.match_position = end + 1 if end == start else end self.matched_lineno = self.lineno lines = re.findall(r"\n", self.text[mp : self.match_position]) cp = mp - 1 @@ -98,10 +80,6 @@ class Lexer(object): cp -= 1 self.matched_charpos = mp - cp self.lineno += len(lines) - # print "MATCHED:", match.group(0), "LINE START:", - # self.matched_lineno, "LINE END:", self.lineno - # print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \ - # (match and "TRUE" or "FALSE") return match def parse_until_text(self, watch_nesting, *text): @@ -161,12 +139,15 @@ class Lexer(object): if self.control_line: control_frame = self.control_line[-1] control_frame.nodes.append(node) - if not ( - isinstance(node, parsetree.ControlLine) - and control_frame.is_ternary(node.keyword) + if ( + not ( + isinstance(node, parsetree.ControlLine) + and 
control_frame.is_ternary(node.keyword)
+                )
+                and self.ternary_stack
+                and self.ternary_stack[-1]
             ):
-                if self.ternary_stack and self.ternary_stack[-1]:
-                    self.ternary_stack[-1][-1].nodes.append(node)
+                self.ternary_stack[-1][-1].nodes.append(node)
         if isinstance(node, parsetree.Tag):
             if len(self.tag):
                 node.parent = self.tag[-1]
@@ -188,18 +169,18 @@ class Lexer(object):
                     raise exceptions.SyntaxException(
                         "Keyword '%s' not a legal ternary for keyword '%s'"
                         % (node.keyword, self.control_line[-1].keyword),
-                        **self.exception_kwargs
+                        **self.exception_kwargs,
                     )
 
     _coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n")
 
     def decode_raw_stream(self, text, decode_raw, known_encoding, filename):
         """given string/unicode or bytes/string, determine encoding
-           from magic encoding comment, return body as unicode
-           or raw if decode_raw=False
+        from magic encoding comment, return body as unicode
+        or raw if decode_raw=False
 
         """
-        if isinstance(text, compat.text_type):
+        if isinstance(text, str):
             m = self._coding_re.match(text)
             encoding = m and m.group(1) or known_encoding or "utf-8"
             return encoding, text
@@ -219,11 +200,7 @@ class Lexer(object):
             )
         else:
             m = self._coding_re.match(text.decode("utf-8", "ignore"))
-            if m:
-                parsed_encoding = m.group(1)
-            else:
-                parsed_encoding = known_encoding or "utf-8"
-
+            parsed_encoding = m.group(1) if m else known_encoding or "utf-8"
         if decode_raw:
             try:
                 text = text.decode(parsed_encoding)
@@ -241,7 +218,7 @@ class Lexer(object):
 
     def parse(self):
         self.encoding, self.text = self.decode_raw_stream(
-            self.text, not self.disable_unicode, self.encoding, self.filename
+            self.text, True, self.encoding, self.filename
         )
 
         for preproc in self.preprocessor:
@@ -276,12 +253,13 @@ class Lexer(object):
             if self.match_position > self.textlength:
                 break
 
-            raise exceptions.CompileException("assertion failed")
+            # TODO: no coverage here
+            raise exceptions.MakoException("assertion failed")
 
         if len(self.tag):
             raise exceptions.SyntaxException(
                 "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
-                **self.exception_kwargs
+                **self.exception_kwargs,
             )
         if len(self.control_line):
             raise exceptions.SyntaxException(
@@ -312,35 +290,34 @@ class Lexer(object):
             re.I | re.S | re.X,
         )
 
-        if match:
-            keyword, attr, isend = match.groups()
-            self.keyword = keyword
-            attributes = {}
-            if attr:
-                for att in re.findall(
-                    r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
-                ):
-                    key, val1, val2 = att
-                    text = val1 or val2
-                    text = text.replace("\r\n", "\n")
-                    attributes[key] = text
-            self.append_node(parsetree.Tag, keyword, attributes)
-            if isend:
-                self.tag.pop()
-            else:
-                if keyword == "text":
-                    match = self.match(r"(.*?)(?=\</%text>)", re.S)
-                    if not match:
-                        raise exceptions.SyntaxException(
-                            "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
-                            **self.exception_kwargs
-                        )
-                    self.append_node(parsetree.Text, match.group(1))
-                    return self.match_tag_end()
-            return True
-        else:
+        if not match:
             return False
 
+        keyword, attr, isend = match.groups()
+        self.keyword = keyword
+        attributes = {}
+        if attr:
+            for att in re.findall(
+                r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
+            ):
+                key, val1, val2 = att
+                text = val1 or val2
+                text = text.replace("\r\n", "\n")
+                attributes[key] = text
+        self.append_node(parsetree.Tag, keyword, attributes)
+        if isend:
+            self.tag.pop()
+        elif keyword == "text":
+            match = self.match(r"(.*?)(?=\</%text>)", re.S)
+            if not match:
+                raise exceptions.SyntaxException(
+                    "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+                    **self.exception_kwargs,
+                )
+            self.append_node(parsetree.Text, match.group(1))
+            return 
self.match_tag_end()
+        return True
+
     def match_tag_end(self):
         match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
         if match:
@@ -348,13 +325,13 @@ class Lexer(object):
             if not len(self.tag):
                 raise exceptions.SyntaxException(
                     "Closing tag without opening tag: </%%%s>"
                     % match.group(1),
-                    **self.exception_kwargs
+                    **self.exception_kwargs,
                 )
             elif self.tag[-1].keyword != match.group(1):
                 raise exceptions.SyntaxException(
                     "Closing tag </%%%s> does not match tag: <%%%s>"
                     % (match.group(1), self.tag[-1].keyword),
-                    **self.exception_kwargs
+                    **self.exception_kwargs,
                 )
             self.tag.pop()
             return True
@@ -363,15 +340,15 @@ class Lexer(object):
 
     def match_end(self):
         match = self.match(r"\Z", re.S)
-        if match:
-            string = match.group()
-            if string:
-                return string
-            else:
-                return True
-        else:
+        if not match:
             return False
 
+        string = match.group()
+        if string:
+            return string
+        else:
+            return True
+
     def match_text(self):
         match = self.match(
             r"""
@@ -422,64 +399,63 @@ class Lexer(object):
 
     def match_expression(self):
         match = self.match(r"\${")
-        if match:
-            line, pos = self.matched_lineno, self.matched_charpos
-            text, end = self.parse_until_text(True, r"\|", r"}")
-            if end == "|":
-                escapes, end = self.parse_until_text(True, r"}")
-            else:
-                escapes = ""
-            text = text.replace("\r\n", "\n")
-            self.append_node(
-                parsetree.Expression,
-                text,
-                escapes.strip(),
-                lineno=line,
-                pos=pos,
-            )
-            return True
-        else:
+        if not match:
             return False
 
+        line, pos = self.matched_lineno, self.matched_charpos
+        text, end = self.parse_until_text(True, r"\|", r"}")
+        if end == "|":
+            escapes, end = self.parse_until_text(True, r"}")
+        else:
+            escapes = ""
+        text = text.replace("\r\n", "\n")
+        self.append_node(
+            parsetree.Expression,
+            text,
+            escapes.strip(),
+            lineno=line,
+            pos=pos,
+        )
+        return True
+
     def match_control_line(self):
         match = self.match(
            r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\\r?\n)|[^\r\n])*)"
             r"(?:\r?\n|\Z)",
             re.M,
         )
-        if match:
-            operator = match.group(1)
-            text = match.group(2)
-            if operator == "%":
-                m2 = re.match(r"(end)?(\w+)\s*(.*)", text)
-                if not m2:
-                    raise exceptions.SyntaxException(
-                        "Invalid control line: '%s'" % text,
-                        **self.exception_kwargs
-                    )
-                isend, keyword = m2.group(1, 2)
-                isend = isend is not None
-
-                if isend:
-                    if not len(self.control_line):
-                        raise exceptions.SyntaxException(
-                            "No starting keyword '%s' for '%s'"
-                            % (keyword, text),
-                            **self.exception_kwargs
-                        )
-                    elif self.control_line[-1].keyword != keyword:
-                        raise exceptions.SyntaxException(
-                            "Keyword '%s' doesn't match keyword '%s'"
-                            % (text, self.control_line[-1].keyword),
-                            **self.exception_kwargs
-                        )
-                self.append_node(parsetree.ControlLine, keyword, isend, text)
-            else:
-                self.append_node(parsetree.Comment, text)
-            return True
-        else:
+        if not match:
             return False
 
+        operator = match.group(1)
+        text = match.group(2)
+        if operator == "%":
+            m2 = re.match(r"(end)?(\w+)\s*(.*)", text)
+            if not m2:
+                raise exceptions.SyntaxException(
+                    "Invalid control line: '%s'" % text,
+                    **self.exception_kwargs,
+                )
+            isend, keyword = m2.group(1, 2)
+            isend = isend is not None
+
+            if isend:
+                if not len(self.control_line):
+                    raise exceptions.SyntaxException(
+                        "No starting keyword '%s' for '%s'" % (keyword, text),
+                        **self.exception_kwargs,
+                    )
+                elif self.control_line[-1].keyword != keyword:
+                    raise exceptions.SyntaxException(
+                        "Keyword '%s' doesn't match keyword '%s'"
+                        % (text, self.control_line[-1].keyword),
+                        **self.exception_kwargs,
+                    )
+            self.append_node(parsetree.ControlLine, keyword, isend, text)
+        else:
+            self.append_node(parsetree.Comment, text)
+        return True
+
     def match_comment(self): 
"""matches the multiline version of a comment""" match = self.match(r"<%doc>(.*?)", re.S) diff --git a/lib/mako/lookup.py b/lib/mako/lookup.py index 476326d4..7afe242b 100644 --- a/lib/mako/lookup.py +++ b/lib/mako/lookup.py @@ -1,5 +1,5 @@ # mako/lookup.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -8,18 +8,14 @@ import os import posixpath import re import stat +import threading from mako import exceptions from mako import util from mako.template import Template -try: - import threading -except: - import dummy_threading as threading - -class TemplateCollection(object): +class TemplateCollection: """Represent a collection of :class:`.Template` objects, identifiable via URI. @@ -34,7 +30,7 @@ class TemplateCollection(object): :class:`.TemplateCollection` is an abstract class, with the usual default implementation being :class:`.TemplateLookup`. - """ + """ def has_template(self, uri): """Return ``True`` if this :class:`.TemplateLookup` is @@ -68,7 +64,7 @@ class TemplateCollection(object): def filename_to_uri(self, uri, filename): """Convert the given ``filename`` to a URI relative to - this :class:`.TemplateCollection`.""" + this :class:`.TemplateCollection`.""" return uri @@ -161,8 +157,6 @@ class TemplateLookup(TemplateCollection): collection_size=-1, format_exceptions=False, error_handler=None, - disable_unicode=False, - bytestring_passthrough=False, output_encoding=None, encoding_errors="strict", cache_args=None, @@ -207,8 +201,6 @@ class TemplateLookup(TemplateCollection): "format_exceptions": format_exceptions, "error_handler": error_handler, "include_error_handler": include_error_handler, - "disable_unicode": disable_unicode, - "bytestring_passthrough": bytestring_passthrough, "output_encoding": output_encoding, "cache_impl": cache_impl, "encoding_errors": encoding_errors, @@ -249,7 +241,7 @@ class TemplateLookup(TemplateCollection): return self._check(uri, self._collection[uri]) else: return self._collection[uri] - except KeyError: + except KeyError as e: u = re.sub(r"^\/+", "", uri) for dir_ in self.directories: # make sure the path seperators are posix - os.altsep is empty @@ -260,8 +252,8 @@ class TemplateLookup(TemplateCollection): return self._load(srcfile, uri) else: raise exceptions.TopLevelLookupException( - "Cant locate template for uri %r" % uri - ) + "Can't locate template for uri %r" % uri + ) from e def adjust_uri(self, uri, relativeto): """Adjust the given ``uri`` based on the given relative URI.""" @@ -270,20 +262,19 @@ class TemplateLookup(TemplateCollection): if key in self._uri_cache: return self._uri_cache[key] - if uri[0] != "/": - if relativeto is not None: - v = self._uri_cache[key] = posixpath.join( - posixpath.dirname(relativeto), uri - ) - else: - v = self._uri_cache[key] = "/" + uri - else: + if uri[0] == "/": v = self._uri_cache[key] = uri + elif relativeto is not None: + v = self._uri_cache[key] = posixpath.join( + posixpath.dirname(relativeto), uri + ) + else: + v = self._uri_cache[key] = "/" + uri return v def filename_to_uri(self, filename): """Convert the given ``filename`` to a URI relative to - this :class:`.TemplateCollection`.""" + this :class:`.TemplateCollection`.""" try: return self._uri_cache[filename] @@ -294,7 +285,7 @@ class TemplateLookup(TemplateCollection): def _relativeize(self, filename): """Return the portion of a filename that is 
'relative' - to the directories in this lookup. + to the directories in this lookup. """ @@ -324,7 +315,7 @@ class TemplateLookup(TemplateCollection): filename=posixpath.normpath(filename), lookup=self, module_filename=module_filename, - **self.template_args + **self.template_args, ) return template except: @@ -342,16 +333,15 @@ class TemplateLookup(TemplateCollection): try: template_stat = os.stat(template.filename) - if template.module._modified_time < template_stat[stat.ST_MTIME]: - self._collection.pop(uri, None) - return self._load(template.filename, uri) - else: + if template.module._modified_time >= template_stat[stat.ST_MTIME]: return template - except OSError: + self._collection.pop(uri, None) + return self._load(template.filename, uri) + except OSError as e: self._collection.pop(uri, None) raise exceptions.TemplateLookupException( - "Cant locate template for uri %r" % uri - ) + "Can't locate template for uri %r" % uri + ) from e def put_string(self, uri, text): """Place a new :class:`.Template` object into this diff --git a/lib/mako/parsetree.py b/lib/mako/parsetree.py index 801e48a7..2135769f 100644 --- a/lib/mako/parsetree.py +++ b/lib/mako/parsetree.py @@ -1,5 +1,5 @@ # mako/parsetree.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -9,13 +9,12 @@ import re from mako import ast -from mako import compat from mako import exceptions from mako import filters from mako import util -class Node(object): +class Node: """base class for a Node in the parse tree.""" @@ -51,7 +50,7 @@ class TemplateNode(Node): """a 'container' node that stores the overall collection of nodes.""" def __init__(self, filename): - super(TemplateNode, self).__init__("", 0, 0, filename) + super().__init__("", 0, 0, filename) self.nodes = [] self.page_attributes = {} @@ -80,7 +79,7 @@ class ControlLine(Node): has_loop_context = False def __init__(self, keyword, isend, text, **kwargs): - super(ControlLine, self).__init__(**kwargs) + super().__init__(**kwargs) self.text = text self.keyword = keyword self.isend = isend @@ -107,11 +106,13 @@ class ControlLine(Node): """return true if the given keyword is a ternary keyword for this ControlLine""" - return keyword in { - "if": set(["else", "elif"]), - "try": set(["except", "finally"]), - "for": set(["else"]), - }.get(self.keyword, []) + cases = { + "if": {"else", "elif"}, + "try": {"except", "finally"}, + "for": {"else"}, + } + + return keyword in cases.get(self.keyword, set()) def __repr__(self): return "ControlLine(%r, %r, %r, %r)" % ( @@ -123,11 +124,10 @@ class ControlLine(Node): class Text(Node): - """defines plain text in the template.""" def __init__(self, content, **kwargs): - super(Text, self).__init__(**kwargs) + super().__init__(**kwargs) self.content = content def __repr__(self): @@ -135,7 +135,6 @@ class Text(Node): class Code(Node): - """defines a Python code block, either inline or module level. e.g.:: @@ -153,7 +152,7 @@ class Code(Node): """ def __init__(self, text, ismodule, **kwargs): - super(Code, self).__init__(**kwargs) + super().__init__(**kwargs) self.text = text self.ismodule = ismodule self.code = ast.PythonCode(text, **self.exception_kwargs) @@ -173,7 +172,6 @@ class Code(Node): class Comment(Node): - """defines a comment line. 
# this is a comment @@ -181,7 +179,7 @@ class Comment(Node): """ def __init__(self, text, **kwargs): - super(Comment, self).__init__(**kwargs) + super().__init__(**kwargs) self.text = text def __repr__(self): @@ -189,7 +187,6 @@ class Comment(Node): class Expression(Node): - """defines an inline expression. ${x+y} @@ -197,7 +194,7 @@ class Expression(Node): """ def __init__(self, text, escapes, **kwargs): - super(Expression, self).__init__(**kwargs) + super().__init__(**kwargs) self.text = text self.escapes = escapes self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs) @@ -210,7 +207,7 @@ class Expression(Node): # TODO: make the "filter" shortcut list configurable at parse/gen time return self.code.undeclared_identifiers.union( self.escapes_code.undeclared_identifiers.difference( - set(filters.DEFAULT_ESCAPES.keys()) + filters.DEFAULT_ESCAPES ) ).difference(self.code.declared_identifiers) @@ -223,7 +220,6 @@ class Expression(Node): class _TagMeta(type): - """metaclass to allow Tag to produce a subclass according to its keyword""" @@ -232,7 +228,7 @@ class _TagMeta(type): def __init__(cls, clsname, bases, dict_): if getattr(cls, "__keyword__", None) is not None: cls._classmap[cls.__keyword__] = cls - super(_TagMeta, cls).__init__(clsname, bases, dict_) + super().__init__(clsname, bases, dict_) def __call__(cls, keyword, attributes, **kwargs): if ":" in keyword: @@ -254,7 +250,7 @@ class _TagMeta(type): return type.__call__(cls, keyword, attributes, **kwargs) -class Tag(compat.with_metaclass(_TagMeta, Node)): +class Tag(Node, metaclass=_TagMeta): """abstract base class for tags. e.g.:: @@ -276,7 +272,7 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): expressions, nonexpressions, required, - **kwargs + **kwargs, ): r"""construct a new Tag instance. 
@@ -297,17 +293,20 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): other arguments passed to the Node superclass (lineno, pos) """ - super(Tag, self).__init__(**kwargs) + super().__init__(**kwargs) self.keyword = keyword self.attributes = attributes self._parse_attributes(expressions, nonexpressions) missing = [r for r in required if r not in self.parsed_attributes] if len(missing): raise exceptions.CompileException( - "Missing attribute(s): %s" - % ",".join([repr(m) for m in missing]), - **self.exception_kwargs + ( + "Missing attribute(s): %s" + % ",".join(repr(m) for m in missing) + ), + **self.exception_kwargs, ) + self.parent = None self.nodes = [] @@ -339,23 +338,22 @@ class Tag(compat.with_metaclass(_TagMeta, Node)): code.undeclared_identifiers ) expr.append("(%s)" % m.group(1)) - else: - if x: - expr.append(repr(x)) + elif x: + expr.append(repr(x)) self.parsed_attributes[key] = " + ".join(expr) or repr("") elif key in nonexpressions: if re.search(r"\${.+?}", self.attributes[key]): raise exceptions.CompileException( - "Attibute '%s' in tag '%s' does not allow embedded " + "Attribute '%s' in tag '%s' does not allow embedded " "expressions" % (key, self.keyword), - **self.exception_kwargs + **self.exception_kwargs, ) self.parsed_attributes[key] = repr(self.attributes[key]) else: raise exceptions.CompileException( "Invalid attribute for tag '%s': '%s'" % (self.keyword, key), - **self.exception_kwargs + **self.exception_kwargs, ) self.expression_undeclared_identifiers = undeclared_identifiers @@ -379,13 +377,13 @@ class IncludeTag(Tag): __keyword__ = "include" def __init__(self, keyword, attributes, **kwargs): - super(IncludeTag, self).__init__( + super().__init__( keyword, attributes, ("file", "import", "args"), (), ("file",), - **kwargs + **kwargs, ) self.page_args = ast.PythonCode( "__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs @@ -396,24 +394,22 @@ class IncludeTag(Tag): def undeclared_identifiers(self): identifiers = self.page_args.undeclared_identifiers.difference( - set(["__DUMMY"]) + {"__DUMMY"} ).difference(self.page_args.declared_identifiers) - return identifiers.union( - super(IncludeTag, self).undeclared_identifiers() - ) + return identifiers.union(super().undeclared_identifiers()) class NamespaceTag(Tag): __keyword__ = "namespace" def __init__(self, keyword, attributes, **kwargs): - super(NamespaceTag, self).__init__( + super().__init__( keyword, attributes, ("file",), ("name", "inheritable", "import", "module"), (), - **kwargs + **kwargs, ) self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self)))) @@ -421,12 +417,12 @@ class NamespaceTag(Tag): raise exceptions.CompileException( "'name' and/or 'import' attributes are required " "for <%namespace>", - **self.exception_kwargs + **self.exception_kwargs, ) if "file" in attributes and "module" in attributes: raise exceptions.CompileException( "<%namespace> may only have one of 'file' or 'module'", - **self.exception_kwargs + **self.exception_kwargs, ) def declared_identifiers(self): @@ -437,9 +433,7 @@ class TextTag(Tag): __keyword__ = "text" def __init__(self, keyword, attributes, **kwargs): - super(TextTag, self).__init__( - keyword, attributes, (), ("filter"), (), **kwargs - ) + super().__init__(keyword, attributes, (), ("filter"), (), **kwargs) self.filter_args = ast.ArgumentList( attributes.get("filter", ""), **self.exception_kwargs ) @@ -458,13 +452,13 @@ class DefTag(Tag): c for c in attributes if c.startswith("cache_") ] - super(DefTag, self).__init__( + super().__init__( keyword, 
attributes, expressions, ("name", "filter", "decorator"), ("name",), - **kwargs + **kwargs, ) name = attributes["name"] if re.match(r"^[\w_]+$", name): @@ -521,19 +515,19 @@ class BlockTag(Tag): c for c in attributes if c.startswith("cache_") ] - super(BlockTag, self).__init__( + super().__init__( keyword, attributes, expressions, ("name", "filter", "decorator"), (), - **kwargs + **kwargs, ) name = attributes.get("name") if name and not re.match(r"^[\w_]+$", name): raise exceptions.CompileException( "%block may not specify an argument signature", - **self.exception_kwargs + **self.exception_kwargs, ) if not name and attributes.get("args", None): raise exceptions.CompileException( @@ -577,7 +571,7 @@ class CallTag(Tag): __keyword__ = "call" def __init__(self, keyword, attributes, **kwargs): - super(CallTag, self).__init__( + super().__init__( keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs ) self.expression = attributes["expr"] @@ -597,26 +591,25 @@ class CallTag(Tag): class CallNamespaceTag(Tag): def __init__(self, namespace, defname, attributes, **kwargs): - super(CallNamespaceTag, self).__init__( + super().__init__( namespace + ":" + defname, attributes, tuple(attributes.keys()) + ("args",), (), (), - **kwargs + **kwargs, ) self.expression = "%s.%s(%s)" % ( namespace, defname, ",".join( - [ - "%s=%s" % (k, v) - for k, v in self.parsed_attributes.items() - if k != "args" - ] + "%s=%s" % (k, v) + for k, v in self.parsed_attributes.items() + if k != "args" ), ) + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) self.body_decl = ast.FunctionArgs( attributes.get("args", ""), **self.exception_kwargs @@ -635,7 +628,7 @@ class InheritTag(Tag): __keyword__ = "inherit" def __init__(self, keyword, attributes, **kwargs): - super(InheritTag, self).__init__( + super().__init__( keyword, attributes, ("file",), (), ("file",), **kwargs ) @@ -651,9 +644,7 @@ class PageTag(Tag): "enable_loop", ] + [c for c in attributes if c.startswith("cache_")] - super(PageTag, self).__init__( - keyword, attributes, expressions, (), (), **kwargs - ) + super().__init__(keyword, attributes, expressions, (), (), **kwargs) self.body_decl = ast.FunctionArgs( attributes.get("args", ""), **self.exception_kwargs ) diff --git a/lib/mako/pygen.py b/lib/mako/pygen.py index 947721f1..46b0b52f 100644 --- a/lib/mako/pygen.py +++ b/lib/mako/pygen.py @@ -1,5 +1,5 @@ # mako/pygen.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -11,7 +11,7 @@ import re from mako import exceptions -class PythonPrinter(object): +class PythonPrinter: def __init__(self, stream): # indentation counter self.indent = 0 @@ -96,18 +96,19 @@ class PythonPrinter(object): is_comment = line and len(line) and line[0] == "#" # see if this line should decrease the indentation level - if not is_comment and (not hastext or self._is_unindentor(line)): - - if self.indent > 0: - self.indent -= 1 - # if the indent_detail stack is empty, the user - # probably put extra closures - the resulting - # module wont compile. 
- if len(self.indent_detail) == 0: - raise exceptions.SyntaxException( - "Too many whitespace closures" - ) - self.indent_detail.pop() + if ( + not is_comment + and (not hastext or self._is_unindentor(line)) + and self.indent > 0 + ): + self.indent -= 1 + # if the indent_detail stack is empty, the user + # probably put extra closures - the resulting + # module wont compile. + if len(self.indent_detail) == 0: + # TODO: no coverage here + raise exceptions.MakoException("Too many whitespace closures") + self.indent_detail.pop() if line is None: return @@ -167,13 +168,10 @@ class PythonPrinter(object): # if the current line doesnt have one of the "unindentor" keywords, # return False match = re.match(r"^\s*(else|elif|except|finally).*\:", line) - if not match: - return False - - # whitespace matches up, we have a compound indentor, + # if True, whitespace matches up, we have a compound indentor, # and this line has an unindentor, this # is probably good enough - return True + return bool(match) # should we decide that its not good enough, heres # more stuff to check. @@ -218,11 +216,7 @@ class PythonPrinter(object): current_state = self.backslashed or self.triplequoted - if re.search(r"\\$", line): - self.backslashed = True - else: - self.backslashed = False - + self.backslashed = bool(re.search(r"\\$", line)) triples = len(re.findall(r"\"\"\"|\'\'\'", line)) if triples == 1 or triples % 2 != 0: self.triplequoted = not self.triplequoted diff --git a/lib/mako/pyparser.py b/lib/mako/pyparser.py index b16672d6..5c55505b 100644 --- a/lib/mako/pyparser.py +++ b/lib/mako/pyparser.py @@ -1,5 +1,5 @@ # mako/pyparser.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -18,22 +18,13 @@ from mako import _ast_util from mako import compat from mako import exceptions from mako import util -from mako.compat import arg_stringname -if compat.py3k: - # words that cannot be assigned to (notably - # smaller than the total keys in __builtins__) - reserved = set(["True", "False", "None", "print"]) +# words that cannot be assigned to (notably +# smaller than the total keys in __builtins__) +reserved = {"True", "False", "None", "print"} - # the "id" attribute on a function node - arg_id = operator.attrgetter("arg") -else: - # words that cannot be assigned to (notably - # smaller than the total keys in __builtins__) - reserved = set(["True", "False", "None"]) - - # the "id" attribute on a function node - arg_id = operator.attrgetter("id") +# the "id" attribute on a function node +arg_id = operator.attrgetter("arg") util.restore__ast(_ast) @@ -43,7 +34,7 @@ def parse(code, mode="exec", **exception_kwargs): try: return _ast_util.parse(code, "", mode) - except Exception: + except Exception as e: raise exceptions.SyntaxException( "(%s) %s (%r)" % ( @@ -51,8 +42,8 @@ def parse(code, mode="exec", **exception_kwargs): compat.exception_as(), code[0:50], ), - **exception_kwargs - ) + **exception_kwargs, + ) from e class FindIdentifiers(_ast_util.NodeVisitor): @@ -85,18 +76,13 @@ class FindIdentifiers(_ast_util.NodeVisitor): self.visit(n) self.in_assign_targets = in_a - if compat.py3k: - - # ExceptHandler is in Python 2, but this block only works in - # Python 3 (and is required there) - - def visit_ExceptHandler(self, node): - if node.name is not None: - self._add_declared(node.name) - if node.type is not None: - self.visit(node.type) - 
for statement in node.body: - self.visit(statement) + def visit_ExceptHandler(self, node): + if node.name is not None: + self._add_declared(node.name) + if node.type is not None: + self.visit(node.type) + for statement in node.body: + self.visit(statement) def visit_Lambda(self, node, *args): self._visit_function(node, True) @@ -108,8 +94,7 @@ class FindIdentifiers(_ast_util.NodeVisitor): def _expand_tuples(self, args): for arg in args: if isinstance(arg, _ast.Tuple): - for n in arg.elts: - yield n + yield from arg.elts else: yield arg @@ -170,15 +155,15 @@ class FindIdentifiers(_ast_util.NodeVisitor): for name in node.names: if name.asname is not None: self._add_declared(name.asname) + elif name.name == "*": + raise exceptions.CompileException( + "'import *' is not supported, since all identifier " + "names must be explicitly declared. Please use the " + "form 'from import , , " + "...' instead.", + **self.exception_kwargs, + ) else: - if name.name == "*": - raise exceptions.CompileException( - "'import *' is not supported, since all identifier " - "names must be explicitly declared. Please use the " - "form 'from import , , " - "...' instead.", - **self.exception_kwargs - ) self._add_declared(name.name) @@ -213,27 +198,20 @@ class ParseFunc(_ast_util.NodeVisitor): argnames = [arg_id(arg) for arg in node.args.args] if node.args.vararg: - argnames.append(arg_stringname(node.args.vararg)) + argnames.append(node.args.vararg.arg) - if compat.py2k: - # kw-only args don't exist in Python 2 - kwargnames = [] - else: - kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs] + kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs] if node.args.kwarg: - kwargnames.append(arg_stringname(node.args.kwarg)) + kwargnames.append(node.args.kwarg.arg) self.listener.argnames = argnames self.listener.defaults = node.args.defaults # ast self.listener.kwargnames = kwargnames - if compat.py2k: - self.listener.kwdefaults = [] - else: - self.listener.kwdefaults = node.args.kw_defaults + self.listener.kwdefaults = node.args.kw_defaults self.listener.varargs = node.args.vararg self.listener.kwargs = node.args.kwarg -class ExpressionGenerator(object): +class ExpressionGenerator: def __init__(self, astnode): self.generator = _ast_util.SourceGenerator(" " * 4) self.generator.visit(astnode) diff --git a/lib/mako/runtime.py b/lib/mako/runtime.py index 465908e6..6d7fa684 100644 --- a/lib/mako/runtime.py +++ b/lib/mako/runtime.py @@ -7,16 +7,16 @@ """provides runtime services for templates, including Context, Namespace, and various helper functions.""" +import builtins import functools import sys from mako import compat from mako import exceptions from mako import util -from mako.compat import compat_builtins -class Context(object): +class Context: """Provides runtime namespace, output buffer, and various callstacks for templates. @@ -24,7 +24,7 @@ class Context(object): See :ref:`runtime_toplevel` for detail on the usage of :class:`.Context`. 
- """ + """ def __init__(self, buffer, **data): self._buffer_stack = [buffer] @@ -103,7 +103,7 @@ class Context(object): if key in self._data: return self._data[key] else: - return compat_builtins.__dict__[key] + return builtins.__dict__[key] def _push_writer(self): """push a capturing buffer onto this Context and return @@ -135,7 +135,7 @@ class Context(object): def get(self, key, default=None): """Return a value from this :class:`.Context`.""" - return self._data.get(key, compat_builtins.__dict__.get(key, default)) + return self._data.get(key, builtins.__dict__.get(key, default)) def write(self, string): """Write a string to this :class:`.Context` object's @@ -216,7 +216,7 @@ class CallerStack(list): self.nextcaller = self.pop() -class Undefined(object): +class Undefined: """Represents an undefined value in a template. @@ -240,7 +240,7 @@ UNDEFINED = Undefined() STOP_RENDERING = "" -class LoopStack(object): +class LoopStack: """a stack for LoopContexts that implements the context manager protocol to automatically pop off the top of the stack on context exit @@ -280,7 +280,7 @@ class LoopStack(object): return iter(self._top) -class LoopContext(object): +class LoopContext: """A magic loop variable. Automatically accessible in any ``% for`` block. @@ -339,14 +339,13 @@ class LoopContext(object): return bool(self.index % 2) def cycle(self, *values): - """Cycle through values as the loop progresses. - """ + """Cycle through values as the loop progresses.""" if not values: raise ValueError("You must provide values to cycle through") return values[self.index % len(values)] -class _NSAttr(object): +class _NSAttr: def __init__(self, parent): self.__parent = parent @@ -360,22 +359,22 @@ class _NSAttr(object): raise AttributeError(key) -class Namespace(object): +class Namespace: """Provides access to collections of rendering methods, which - can be local, from other templates, or from imported modules. + can be local, from other templates, or from imported modules. - To access a particular rendering method referenced by a - :class:`.Namespace`, use plain attribute access: + To access a particular rendering method referenced by a + :class:`.Namespace`, use plain attribute access: - .. sourcecode:: mako + .. sourcecode:: mako - ${some_namespace.foo(x, y, z)} + ${some_namespace.foo(x, y, z)} - :class:`.Namespace` also contains several built-in attributes - described here. + :class:`.Namespace` also contains several built-in attributes + described here. - """ + """ def __init__( self, @@ -390,7 +389,7 @@ class Namespace(object): self.context = context self.inherits = inherits if callables is not None: - self.callables = dict([(c.__name__, c) for c in callables]) + self.callables = {c.__name__: c for c in callables} callables = () @@ -482,15 +481,14 @@ class Namespace(object): key = (self, uri) if key in self.context.namespaces: return self.context.namespaces[key] - else: - ns = TemplateNamespace( - uri, - self.context._copy(), - templateuri=uri, - calling_uri=self._templateuri, - ) - self.context.namespaces[key] = ns - return ns + ns = TemplateNamespace( + uri, + self.context._copy(), + templateuri=uri, + calling_uri=self._templateuri, + ) + self.context.namespaces[key] = ns + return ns def get_template(self, uri): """Return a :class:`.Template` from the given ``uri``. 
@@ -574,7 +572,7 @@ class TemplateNamespace(Namespace): self.context = context self.inherits = inherits if callables is not None: - self.callables = dict([(c.__name__, c) for c in callables]) + self.callables = {c.__name__: c for c in callables} if templateuri is not None: self.template = _lookup_template(context, templateuri, calling_uri) @@ -666,7 +664,7 @@ class ModuleNamespace(Namespace): self.context = context self.inherits = inherits if callables is not None: - self.callables = dict([(c.__name__, c) for c in callables]) + self.callables = {c.__name__: c for c in callables} mod = __import__(module) for token in module.split(".")[1:]: @@ -790,7 +788,7 @@ def _include_file(context, uri, calling_uri, **kwargs): except Exception: result = template.include_error_handler(ctx, compat.exception_as()) if not result: - compat.reraise(*sys.exc_info()) + raise else: callable_(ctx, **kwargs) @@ -837,8 +835,10 @@ def _lookup_template(context, uri, relativeto): uri = lookup.adjust_uri(uri, relativeto) try: return lookup.get_template(uri) - except exceptions.TopLevelLookupException: - raise exceptions.TemplateLookupException(str(compat.exception_as())) + except exceptions.TopLevelLookupException as e: + raise exceptions.TemplateLookupException( + str(compat.exception_as()) + ) from e def _populate_self_namespace(context, template, self_ns=None): @@ -862,14 +862,10 @@ def _render(template, callable_, args, data, as_unicode=False): output of the given template and template callable.""" if as_unicode: - buf = util.FastEncodingBuffer(as_unicode=True) - elif template.bytestring_passthrough: - buf = compat.StringIO() + buf = util.FastEncodingBuffer() else: buf = util.FastEncodingBuffer( - as_unicode=as_unicode, - encoding=template.output_encoding, - errors=template.encoding_errors, + encoding=template.output_encoding, errors=template.encoding_errors ) context = Context(buf, **data) context._outputting_as_unicode = as_unicode @@ -880,7 +876,7 @@ def _render(template, callable_, args, data, as_unicode=False): callable_, context, *args, - **_kwargs_for_callable(callable_, data) + **_kwargs_for_callable(callable_, data), ) return context._pop_buffer().getvalue() @@ -951,13 +947,15 @@ def _render_error(template, context, error): if template.error_handler: result = template.error_handler(context, error) if not result: - compat.reraise(*sys.exc_info()) + tp, value, tb = sys.exc_info() + if value and tb: + raise value.with_traceback(tb) + else: + raise error else: error_template = exceptions.html_error_template() if context._outputting_as_unicode: - context._buffer_stack[:] = [ - util.FastEncodingBuffer(as_unicode=True) - ] + context._buffer_stack[:] = [util.FastEncodingBuffer()] else: context._buffer_stack[:] = [ util.FastEncodingBuffer( diff --git a/lib/mako/template.py b/lib/mako/template.py index 5ed23204..bbbe73cb 100644 --- a/lib/mako/template.py +++ b/lib/mako/template.py @@ -1,5 +1,5 @@ # mako/template.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -25,7 +25,7 @@ from mako import util from mako.lexer import Lexer -class Template(object): +class Template: r"""Represents a compiled template. @@ -53,17 +53,6 @@ class Template(object): of return-valued ``%def``\ s "opt out" of that filtering via passing special attributes or objects. 
- :param bytestring_passthrough: When ``True``, and ``output_encoding`` is - set to ``None``, and :meth:`.Template.render` is used to render, - the `StringIO` or `cStringIO` buffer will be used instead of the - default "fast" buffer. This allows raw bytestrings in the - output stream, such as in expressions, to pass straight - through to the buffer. This flag is forced - to ``True`` if ``disable_unicode`` is also configured. - - .. versionadded:: 0.4 - Added to provide the same behavior as that of the previous series. - :param cache_args: Dictionary of cache configuration arguments that will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`. @@ -94,9 +83,6 @@ class Template(object): :param default_filters: List of string filter names that will be applied to all expressions. See :ref:`filtering_default_filters`. - :param disable_unicode: Disables all awareness of Python Unicode - objects. See :ref:`unicode_disabled`. - :param enable_loop: When ``True``, enable the ``loop`` context variable. This can be set to ``False`` to support templates that may be making usage of the name "``loop``". Individual templates can @@ -255,9 +241,7 @@ class Template(object): cache_url=None, module_filename=None, input_encoding=None, - disable_unicode=False, module_writer=None, - bytestring_passthrough=False, default_filters=None, buffer_filters=(), strict_undefined=False, @@ -294,26 +278,12 @@ class Template(object): self.input_encoding = input_encoding self.output_encoding = output_encoding self.encoding_errors = encoding_errors - self.disable_unicode = disable_unicode - self.bytestring_passthrough = bytestring_passthrough or disable_unicode self.enable_loop = enable_loop self.strict_undefined = strict_undefined self.module_writer = module_writer - if compat.py3k and disable_unicode: - raise exceptions.UnsupportedError( - "Mako for Python 3 does not " "support disabling Unicode" - ) - elif output_encoding and disable_unicode: - raise exceptions.UnsupportedError( - "output_encoding must be set to " - "None when disable_unicode is used." - ) if default_filters is None: - if compat.py3k or self.disable_unicode: - self.default_filters = ["str"] - else: - self.default_filters = ["unicode"] + self.default_filters = ["str"] else: self.default_filters = default_filters self.buffer_filters = buffer_filters @@ -387,11 +357,7 @@ class Template(object): ): self.cache_impl = cache_impl self.cache_enabled = cache_enabled - if cache_args: - self.cache_args = cache_args - else: - self.cache_args = {} - + self.cache_args = cache_args or {} # transfer deprecated cache_* args if cache_type: self.cache_args["type"] = cache_type @@ -463,7 +429,7 @@ class Template(object): If the template specifies an output encoding, the string will be encoded accordingly, else the output is raw (raw - output uses `cStringIO` and can't handle multibyte + output uses `StringIO` and can't handle multibyte characters). A :class:`.Context` object is created corresponding to the given data. Arguments that are explicitly declared by this template's internal rendering method are also @@ -517,17 +483,17 @@ class ModuleTemplate(Template): """A Template which is constructed given an existing Python module. 
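With disable_unicode and bytestring_passthrough removed, default_filters is always ["str"] and the remaining output question is whether the buffer encodes: render() returns bytes when output_encoding is set, otherwise text. A quick sanity sketch, assuming Mako 1.2.0 is installed:

    from mako.template import Template

    t = Template("Hello, ${name}! count=${count}")
    print(t.render_unicode(name="world", count=3))  # str; int passed through str()

    t2 = Template("Héllo, ${name}!", output_encoding="utf-8")
    print(t2.render(name="world"))  # bytes, UTF-8 encoded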
- e.g.:: + e.g.:: - t = Template("this is a template") - f = file("mymodule.py", "w") - f.write(t.code) - f.close() + t = Template("this is a template") + f = file("mymodule.py", "w") + f.write(t.code) + f.close() - import mymodule + import mymodule - t = ModuleTemplate(mymodule) - print(t.render()) + t = ModuleTemplate(mymodule) + print(t.render()) """ @@ -541,8 +507,6 @@ class ModuleTemplate(Template): template_source=None, output_encoding=None, encoding_errors="strict", - disable_unicode=False, - bytestring_passthrough=False, format_exceptions=False, error_handler=None, lookup=None, @@ -559,20 +523,8 @@ class ModuleTemplate(Template): self.input_encoding = module._source_encoding self.output_encoding = output_encoding self.encoding_errors = encoding_errors - self.disable_unicode = disable_unicode - self.bytestring_passthrough = bytestring_passthrough or disable_unicode self.enable_loop = module._enable_loop - if compat.py3k and disable_unicode: - raise exceptions.UnsupportedError( - "Mako for Python 3 does not " "support disabling Unicode" - ) - elif output_encoding and disable_unicode: - raise exceptions.UnsupportedError( - "output_encoding must be set to " - "None when disable_unicode is used." - ) - self.module = module self.filename = template_filename ModuleInfo( @@ -616,19 +568,18 @@ class DefTemplate(Template): self.include_error_handler = parent.include_error_handler self.enable_loop = parent.enable_loop self.lookup = parent.lookup - self.bytestring_passthrough = parent.bytestring_passthrough def get_def(self, name): return self.parent.get_def(name) -class ModuleInfo(object): +class ModuleInfo: """Stores information about a module currently loaded into memory, provides reverse lookups of template source, module source code based on a module's identifier. 
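The ModuleTemplate docstring above still shows the Python 2 file() builtin; the same precompile-and-reload round trip in Python 3 looks like the following (a sketch assuming Mako 1.2+ is installed, with mymodule.py as a scratch file on the import path):

    from mako.template import ModuleTemplate, Template

    t = Template("this is a template")
    with open("mymodule.py", "w") as f:  # file() is gone in Python 3
        f.write(t.code)

    import mymodule
    print(ModuleTemplate(mymodule).render())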
- """ + """ _modules = weakref.WeakValueDictionary() @@ -658,9 +609,9 @@ class ModuleInfo(object): r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S ).group(1) source_map = json.loads(source_map) - source_map["line_map"] = dict( - (int(k), int(v)) for k, v in source_map["line_map"].items() - ) + source_map["line_map"] = { + int(k): int(v) for k, v in source_map["line_map"].items() + } if full_line_map: f_line_map = source_map["full_line_map"] = [] line_map = source_map["line_map"] @@ -681,28 +632,25 @@ class ModuleInfo(object): @property def source(self): - if self.template_source is not None: - if self.module._source_encoding and not isinstance( - self.template_source, compat.text_type - ): - return self.template_source.decode( - self.module._source_encoding - ) - else: - return self.template_source - else: + if self.template_source is None: data = util.read_file(self.template_filename) if self.module._source_encoding: return data.decode(self.module._source_encoding) else: return data + elif self.module._source_encoding and not isinstance( + self.template_source, str + ): + return self.template_source.decode(self.module._source_encoding) + else: + return self.template_source + def _compile(template, text, filename, generate_magic_comment): lexer = template.lexer_cls( text, filename, - disable_unicode=template.disable_unicode, input_encoding=template.input_encoding, preprocessor=template.preprocessor, ) @@ -717,7 +665,6 @@ def _compile(template, text, filename, generate_magic_comment): future_imports=template.future_imports, source_encoding=lexer.encoding, generate_magic_comment=generate_magic_comment, - disable_unicode=template.disable_unicode, strict_undefined=template.strict_undefined, enable_loop=template.enable_loop, reserved_names=template.reserved_names, @@ -728,15 +675,10 @@ def _compile(template, text, filename, generate_magic_comment): def _compile_text(template, text, filename): identifier = template.module_id source, lexer = _compile( - template, - text, - filename, - generate_magic_comment=template.disable_unicode, + template, text, filename, generate_magic_comment=False ) cid = identifier - if not compat.py3k and isinstance(cid, compat.text_type): - cid = cid.encode() module = types.ModuleType(cid) code = compile(source, cid, "exec") @@ -750,7 +692,7 @@ def _compile_module_file(template, text, filename, outputpath, module_writer): template, text, filename, generate_magic_comment=True ) - if isinstance(source, compat.text_type): + if isinstance(source, str): source = source.encode(lexer.encoding or "ascii") if module_writer: @@ -767,10 +709,7 @@ def _compile_module_file(template, text, filename, outputpath, module_writer): def _get_module_info_from_callable(callable_): - if compat.py3k: - return _get_module_info(callable_.__globals__["__name__"]) - else: - return _get_module_info(callable_.func_globals["__name__"]) + return _get_module_info(callable_.__globals__["__name__"]) def _get_module_info(filename): diff --git a/lib/mako/testing/__init__.py b/lib/mako/testing/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/mako/testing/_config.py b/lib/mako/testing/_config.py new file mode 100644 index 00000000..4ee3d0a6 --- /dev/null +++ b/lib/mako/testing/_config.py @@ -0,0 +1,128 @@ +import configparser +import dataclasses +from dataclasses import dataclass +from pathlib import Path +from typing import Callable +from typing import ClassVar +from typing import Optional +from typing import Union + +from .helpers import make_path + + +class 
ConfigError(BaseException): + pass + + +class MissingConfig(ConfigError): + pass + + +class MissingConfigSection(ConfigError): + pass + + +class MissingConfigItem(ConfigError): + pass + + +class ConfigValueTypeError(ConfigError): + pass + + +class _GetterDispatch: + def __init__(self, initialdata, default_getter: Callable): + self.default_getter = default_getter + self.data = initialdata + + def get_fn_for_type(self, type_): + return self.data.get(type_, self.default_getter) + + def get_typed_value(self, type_, name): + get_fn = self.get_fn_for_type(type_) + return get_fn(name) + + +def _parse_cfg_file(filespec: Union[Path, str]): + cfg = configparser.ConfigParser() + try: + filepath = make_path(filespec, check_exists=True) + except FileNotFoundError as e: + raise MissingConfig(f"No config file found at {filespec}") from e + else: + with open(filepath, encoding="utf-8") as f: + cfg.read_file(f) + return cfg + + +def _build_getter(cfg_obj, cfg_section, method, converter=None): + def caller(option, **kwargs): + try: + rv = getattr(cfg_obj, method)(cfg_section, option, **kwargs) + except configparser.NoSectionError as nse: + raise MissingConfigSection( + f"No config section named {cfg_section}" + ) from nse + except configparser.NoOptionError as noe: + raise MissingConfigItem(f"No config item for {option}") from noe + except ValueError as ve: + # ConfigParser.getboolean, .getint, .getfloat raise ValueError + # on bad types + raise ConfigValueTypeError( + f"Wrong value type for {option}" + ) from ve + else: + if converter: + try: + rv = converter(rv) + except Exception as e: + raise ConfigValueTypeError( + f"Wrong value type for {option}" + ) from e + return rv + + return caller + + +def _build_getter_dispatch(cfg_obj, cfg_section, converters=None): + converters = converters or {} + + default_getter = _build_getter(cfg_obj, cfg_section, "get") + + # support ConfigParser builtins + getters = { + int: _build_getter(cfg_obj, cfg_section, "getint"), + bool: _build_getter(cfg_obj, cfg_section, "getboolean"), + float: _build_getter(cfg_obj, cfg_section, "getfloat"), + str: default_getter, + } + + # use ConfigParser.get and convert value + getters.update( + { + type_: _build_getter( + cfg_obj, cfg_section, "get", converter=converter_fn + ) + for type_, converter_fn in converters.items() + } + ) + + return _GetterDispatch(getters, default_getter) + + +@dataclass +class ReadsCfg: + section_header: ClassVar[str] + converters: ClassVar[Optional[dict]] = None + + @classmethod + def from_cfg_file(cls, filespec: Union[Path, str]): + cfg = _parse_cfg_file(filespec) + dispatch = _build_getter_dispatch( + cfg, cls.section_header, converters=cls.converters + ) + kwargs = { + field.name: dispatch.get_typed_value(field.type, field.name) + for field in dataclasses.fields(cls) + } + return cls(**kwargs) diff --git a/lib/mako/testing/assertions.py b/lib/mako/testing/assertions.py new file mode 100644 index 00000000..14ea6352 --- /dev/null +++ b/lib/mako/testing/assertions.py @@ -0,0 +1,167 @@ +import contextlib +import re +import sys + + +def eq_(a, b, msg=None): + """Assert a == b, with repr messaging on failure.""" + assert a == b, msg or "%r != %r" % (a, b) + + +def ne_(a, b, msg=None): + """Assert a != b, with repr messaging on failure.""" + assert a != b, msg or "%r == %r" % (a, b) + + +def in_(a, b, msg=None): + """Assert a in b, with repr messaging on failure.""" + assert a in b, msg or "%r not in %r" % (a, b) + + +def not_in(a, b, msg=None): + """Assert a in not b, with repr messaging on failure.""" + assert 
a not in b, msg or "%r is in %r" % (a, b) + + +def _assert_proper_exception_context(exception): + """assert that any exception we're catching does not have a __context__ + without a __cause__, and that __suppress_context__ is never set. + + Python 3 will report nested as exceptions as "during the handling of + error X, error Y occurred". That's not what we want to do. We want + these exceptions in a cause chain. + + """ + + if ( + exception.__context__ is not exception.__cause__ + and not exception.__suppress_context__ + ): + assert False, ( + "Exception %r was correctly raised but did not set a cause, " + "within context %r as its cause." + % (exception, exception.__context__) + ) + + +def _assert_proper_cause_cls(exception, cause_cls): + """assert that any exception we're catching does not have a __context__ + without a __cause__, and that __suppress_context__ is never set. + + Python 3 will report nested as exceptions as "during the handling of + error X, error Y occurred". That's not what we want to do. We want + these exceptions in a cause chain. + + """ + assert isinstance(exception.__cause__, cause_cls), ( + "Exception %r was correctly raised but has cause %r, which does not " + "have the expected cause type %r." + % (exception, exception.__cause__, cause_cls) + ) + + +def assert_raises(except_cls, callable_, *args, **kw): + return _assert_raises(except_cls, callable_, args, kw) + + +def assert_raises_with_proper_context(except_cls, callable_, *args, **kw): + return _assert_raises(except_cls, callable_, args, kw, check_context=True) + + +def assert_raises_with_given_cause( + except_cls, cause_cls, callable_, *args, **kw +): + return _assert_raises(except_cls, callable_, args, kw, cause_cls=cause_cls) + + +def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): + return _assert_raises(except_cls, callable_, args, kwargs, msg=msg) + + +def assert_raises_message_with_proper_context( + except_cls, msg, callable_, *args, **kwargs +): + return _assert_raises( + except_cls, callable_, args, kwargs, msg=msg, check_context=True + ) + + +def assert_raises_message_with_given_cause( + except_cls, msg, cause_cls, callable_, *args, **kwargs +): + return _assert_raises( + except_cls, callable_, args, kwargs, msg=msg, cause_cls=cause_cls + ) + + +def _assert_raises( + except_cls, + callable_, + args, + kwargs, + msg=None, + check_context=False, + cause_cls=None, +): + + with _expect_raises(except_cls, msg, check_context, cause_cls) as ec: + callable_(*args, **kwargs) + return ec.error + + +class _ErrorContainer: + error = None + + +@contextlib.contextmanager +def _expect_raises(except_cls, msg=None, check_context=False, cause_cls=None): + ec = _ErrorContainer() + if check_context: + are_we_already_in_a_traceback = sys.exc_info()[0] + try: + yield ec + success = False + except except_cls as err: + ec.error = err + success = True + if msg is not None: + # I'm often pdbing here, and "err" above isn't + # in scope, so assign the string explicitly + error_as_string = str(err) + assert re.search(msg, error_as_string, re.UNICODE), "%r !~ %s" % ( + msg, + error_as_string, + ) + if cause_cls is not None: + _assert_proper_cause_cls(err, cause_cls) + if check_context and not are_we_already_in_a_traceback: + _assert_proper_exception_context(err) + print(str(err).encode("utf-8")) + + # it's generally a good idea to not carry traceback objects outside + # of the except: block, but in this case especially we seem to have + # hit some bug in either python 3.10.0b2 or greenlet or both which + # this 
seems to fix: + # https://github.com/python-greenlet/greenlet/issues/242 + del ec + + # assert outside the block so it works for AssertionError too ! + assert success, "Callable did not raise an exception" + + +def expect_raises(except_cls, check_context=False): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message(except_cls, msg, check_context=False): + return _expect_raises(except_cls, msg=msg, check_context=check_context) + + +def expect_raises_with_proper_context(except_cls, check_context=True): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message_with_proper_context( + except_cls, msg, check_context=True +): + return _expect_raises(except_cls, msg=msg, check_context=check_context) diff --git a/lib/mako/testing/config.py b/lib/mako/testing/config.py new file mode 100644 index 00000000..b77d0c08 --- /dev/null +++ b/lib/mako/testing/config.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from pathlib import Path + +from ._config import ReadsCfg +from .helpers import make_path + + +@dataclass +class Config(ReadsCfg): + module_base: Path + template_base: Path + + section_header = "mako_testing" + converters = {Path: make_path} + + +config = Config.from_cfg_file("./setup.cfg") diff --git a/lib/mako/testing/exclusions.py b/lib/mako/testing/exclusions.py new file mode 100644 index 00000000..37b2d14a --- /dev/null +++ b/lib/mako/testing/exclusions.py @@ -0,0 +1,80 @@ +import pytest + +from mako.ext.beaker_cache import has_beaker +from mako.util import update_wrapper + + +try: + import babel.messages.extract as babel +except ImportError: + babel = None + + +try: + import lingua +except ImportError: + lingua = None + + +try: + import dogpile.cache # noqa +except ImportError: + has_dogpile_cache = False +else: + has_dogpile_cache = True + + +requires_beaker = pytest.mark.skipif( + not has_beaker, reason="Beaker is required for these tests." 
+) + + +requires_babel = pytest.mark.skipif( + babel is None, reason="babel not installed: skipping babelplugin test" +) + + +requires_lingua = pytest.mark.skipif( + lingua is None, reason="lingua not installed: skipping linguaplugin test" +) + + +requires_dogpile_cache = pytest.mark.skipif( + not has_dogpile_cache, + reason="dogpile.cache is required to run these tests", +) + + +def _pygments_version(): + try: + import pygments + + version = pygments.__version__ + except: + version = "0" + return version + + +requires_pygments_14 = pytest.mark.skipif( + _pygments_version() < "1.4", reason="Requires pygments 1.4 or greater" +) + + +# def requires_pygments_14(fn): + +# return skip_if( +# lambda: version < "1.4", "Requires pygments 1.4 or greater" +# )(fn) + + +def requires_no_pygments_exceptions(fn): + def go(*arg, **kw): + from mako import exceptions + + exceptions._install_fallback() + try: + return fn(*arg, **kw) + finally: + exceptions._install_highlighting() + + return update_wrapper(go, fn) diff --git a/lib/mako/testing/fixtures.py b/lib/mako/testing/fixtures.py new file mode 100644 index 00000000..c9379c0c --- /dev/null +++ b/lib/mako/testing/fixtures.py @@ -0,0 +1,109 @@ +import os + +from mako.cache import CacheImpl +from mako.cache import register_plugin +from mako.template import Template +from .assertions import eq_ +from .config import config + + +class TemplateTest: + def _file_template(self, filename, **kw): + filepath = self._file_path(filename) + return Template( + uri=filename, + filename=filepath, + module_directory=config.module_base, + **kw, + ) + + def _file_path(self, filename): + name, ext = os.path.splitext(filename) + py3k_path = os.path.join(config.template_base, name + "_py3k" + ext) + if os.path.exists(py3k_path): + return py3k_path + + return os.path.join(config.template_base, filename) + + def _do_file_test( + self, + filename, + expected, + filters=None, + unicode_=True, + template_args=None, + **kw, + ): + t1 = self._file_template(filename, **kw) + self._do_test( + t1, + expected, + filters=filters, + unicode_=unicode_, + template_args=template_args, + ) + + def _do_memory_test( + self, + source, + expected, + filters=None, + unicode_=True, + template_args=None, + **kw, + ): + t1 = Template(text=source, **kw) + self._do_test( + t1, + expected, + filters=filters, + unicode_=unicode_, + template_args=template_args, + ) + + def _do_test( + self, + template, + expected, + filters=None, + template_args=None, + unicode_=True, + ): + if template_args is None: + template_args = {} + if unicode_: + output = template.render_unicode(**template_args) + else: + output = template.render(**template_args) + + if filters: + output = filters(output) + eq_(output, expected) + + +class PlainCacheImpl(CacheImpl): + """Simple memory cache impl so that tests which + use caching can run without beaker.""" + + def __init__(self, cache): + self.cache = cache + self.data = {} + + def get_or_create(self, key, creation_function, **kw): + if key in self.data: + return self.data[key] + else: + self.data[key] = data = creation_function(**kw) + return data + + def put(self, key, value, **kw): + self.data[key] = value + + def get(self, key, **kw): + return self.data[key] + + def invalidate(self, key, **kw): + del self.data[key] + + +register_plugin("plain", __name__, "PlainCacheImpl") diff --git a/lib/mako/testing/helpers.py b/lib/mako/testing/helpers.py new file mode 100644 index 00000000..77cca367 --- /dev/null +++ b/lib/mako/testing/helpers.py @@ -0,0 +1,67 @@ +import contextlib +import 
pathlib +from pathlib import Path +import re +import time +from typing import Union +from unittest import mock + + +def flatten_result(result): + return re.sub(r"[\s\r\n]+", " ", result).strip() + + +def result_lines(result): + return [ + x.strip() + for x in re.split(r"\r?\n", re.sub(r" +", " ", result)) + if x.strip() != "" + ] + + +def make_path( + filespec: Union[Path, str], + make_absolute: bool = True, + check_exists: bool = False, +) -> Path: + path = Path(filespec) + if make_absolute: + path = path.resolve(strict=check_exists) + if check_exists and (not path.exists()): + raise FileNotFoundError(f"No file or directory at {filespec}") + return path + + +def _unlink_path(path, missing_ok=False): + # Replicate 3.8+ functionality in 3.7 + cm = contextlib.nullcontext() + if missing_ok: + cm = contextlib.suppress(FileNotFoundError) + + with cm: + path.unlink() + + +def replace_file_with_dir(pathspec): + path = pathlib.Path(pathspec) + _unlink_path(path, missing_ok=True) + path.mkdir(exist_ok=True) + return path + + +def file_with_template_code(filespec): + with open(filespec, "w") as f: + f.write( + """ +i am an artificial template just for you +""" + ) + return filespec + + +@contextlib.contextmanager +def rewind_compile_time(hours=1): + rewound = time.time() - (hours * 3_600) + with mock.patch("mako.codegen.time") as codegen_time: + codegen_time.time.return_value = rewound + yield diff --git a/lib/mako/util.py b/lib/mako/util.py index 16e3c726..74c8b9eb 100644 --- a/lib/mako/util.py +++ b/lib/mako/util.py @@ -1,10 +1,8 @@ # mako/util.py -# Copyright 2006-2020 the Mako authors and contributors +# Copyright 2006-2021 the Mako authors and contributors # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from __future__ import absolute_import - from ast import parse import codecs import collections @@ -13,7 +11,7 @@ import os import re import timeit -from mako import compat +from .compat import importlib_metadata_get def update_wrapper(decorated, fn): @@ -22,7 +20,7 @@ def update_wrapper(decorated, fn): return decorated -class PluginLoader(object): +class PluginLoader: def __init__(self, group): self.group = group self.impls = {} @@ -30,18 +28,17 @@ class PluginLoader(object): def load(self, name): if name in self.impls: return self.impls[name]() - else: - import pkg_resources - for impl in pkg_resources.iter_entry_points(self.group, name): + for impl in importlib_metadata_get(self.group): + if impl.name == name: self.impls[name] = impl.load return impl.load() - else: - from mako import exceptions - raise exceptions.RuntimeException( - "Can't load plugin %s %s" % (self.group, name) - ) + from mako import exceptions + + raise exceptions.RuntimeException( + "Can't load plugin %s %s" % (self.group, name) + ) def register(self, name, modulepath, objname): def load(): @@ -61,7 +58,7 @@ def verify_directory(dir_): while not os.path.exists(dir_): try: tries += 1 - os.makedirs(dir_, compat.octal("0775")) + os.makedirs(dir_, 0o755) except: if tries > 5: raise @@ -76,7 +73,7 @@ def to_list(x, default=None): return x -class memoized_property(object): +class memoized_property: """A read-only @property that is only evaluated once.""" @@ -92,7 +89,7 @@ class memoized_property(object): return result -class memoized_instancemethod(object): +class memoized_instancemethod: """Decorate a method memoize its return value. 
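memoized_property and memoized_instancemethod above are cache-on-first-use descriptors. A standalone sketch of the property flavor (the standard recipe; Mako's actual body is not shown in this hunk and may differ in detail):

    class memoized_property:
        """A read-only @property that is only evaluated once."""

        def __init__(self, fget):
            self.fget = fget
            self.__name__ = fget.__name__

        def __get__(self, obj, cls):
            if obj is None:
                return self
            # Store the computed value on the instance, so later reads
            # hit the instance __dict__ and bypass __get__ entirely.
            obj.__dict__[self.__name__] = result = self.fget(obj)
            return result

    class Thing:
        @memoized_property
        def answer(self):
            print("computing once")
            return 42

    t = Thing()
    t.answer  # prints "computing once"
    t.answer  # cached; no print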
@@ -140,19 +137,15 @@ class SetLikeDict(dict): return x -class FastEncodingBuffer(object): +class FastEncodingBuffer: """a very rudimentary buffer that is faster than StringIO, - but doesn't crash on unicode data like cStringIO.""" + and supports unicode data.""" - def __init__(self, encoding=None, errors="strict", as_unicode=False): + def __init__(self, encoding=None, errors="strict"): self.data = collections.deque() self.encoding = encoding - if as_unicode: - self.delim = compat.u("") - else: - self.delim = "" - self.as_unicode = as_unicode + self.delim = "" self.errors = errors self.write = self.data.append @@ -179,7 +172,7 @@ class LRUCache(dict): is inexact. """ - class _Item(object): + class _Item: def __init__(self, key, value): self.key = key self.value = value @@ -203,9 +196,8 @@ class LRUCache(dict): def setdefault(self, key, value): if key in self: return self[key] - else: - self[key] = value - return value + self[key] = value + return value def __setitem__(self, key, value): item = dict.get(self, key) @@ -295,7 +287,7 @@ def sorted_dict_repr(d): """ keys = list(d.keys()) keys.sort() - return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}" + return "{" + ", ".join("%r: %r" % (k, d[k]) for k in keys) + "}" def restore__ast(_ast): @@ -308,7 +300,7 @@ def restore__ast(_ast): m = compile( """\ def foo(): pass -class Bar(object): pass +class Bar: pass if False: pass baz = 'mako' 1 + 2 - 3 * 4 / 5 @@ -380,12 +372,8 @@ mako in baz not in mako""", def read_file(path, mode="rb"): - fp = open(path, mode) - try: - data = fp.read() - return data - finally: - fp.close() + with open(path, mode) as fp: + return fp.read() def read_python_file(path): diff --git a/lib/markupsafe/__init__.py b/lib/markupsafe/__init__.py index d331ac36..0f1c4f46 100644 --- a/lib/markupsafe/__init__.py +++ b/lib/markupsafe/__init__.py @@ -11,9 +11,10 @@ if t.TYPE_CHECKING: pass -__version__ = "2.0.1" +__version__ = "2.1.1" -_striptags_re = re.compile(r"(|<[^>]*>)") +_strip_comments_re = re.compile(r"") +_strip_tags_re = re.compile(r"<.*?>") def _simple_escaping_wrapper(name: str) -> t.Callable[..., "Markup"]: @@ -92,19 +93,24 @@ class Markup(str): return NotImplemented - def __mul__(self, num: int) -> "Markup": + def __mul__(self, num: "te.SupportsIndex") -> "Markup": if isinstance(num, int): return self.__class__(super().__mul__(num)) - return NotImplemented # type: ignore + return NotImplemented __rmul__ = __mul__ def __mod__(self, arg: t.Any) -> "Markup": if isinstance(arg, tuple): + # a tuple of arguments, each wrapped arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg) - else: + elif hasattr(type(arg), "__getitem__") and not isinstance(arg, str): + # a mapping of arguments, wrapped arg = _MarkupEscapeHelper(arg, self.escape) + else: + # a single argument, wrapped with the helper and a tuple + arg = (_MarkupEscapeHelper(arg, self.escape),) return self.__class__(super().__mod__(arg)) @@ -153,8 +159,11 @@ class Markup(str): >>> Markup("Main »\tAbout").striptags() 'Main » About' """ - stripped = " ".join(_striptags_re.sub("", self).split()) - return Markup(stripped).unescape() + # Use two regexes to avoid ambiguous matches. 
+ value = _strip_comments_re.sub("", self) + value = _strip_tags_re.sub("", value) + value = " ".join(value.split()) + return Markup(value).unescape() @classmethod def escape(cls, s: t.Any) -> "Markup": @@ -280,9 +289,7 @@ try: from ._speedups import escape as escape from ._speedups import escape_silent as escape_silent from ._speedups import soft_str as soft_str - from ._speedups import soft_unicode except ImportError: from ._native import escape as escape from ._native import escape_silent as escape_silent # noqa: F401 from ._native import soft_str as soft_str # noqa: F401 - from ._native import soft_unicode # noqa: F401 diff --git a/lib/markupsafe/_native.py b/lib/markupsafe/_native.py index 6f7eb7a8..8117b271 100644 --- a/lib/markupsafe/_native.py +++ b/lib/markupsafe/_native.py @@ -61,15 +61,3 @@ def soft_str(s: t.Any) -> str: return str(s) return s - - -def soft_unicode(s: t.Any) -> str: - import warnings - - warnings.warn( - "'soft_unicode' has been renamed to 'soft_str'. The old name" - " will be removed in MarkupSafe 2.1.", - DeprecationWarning, - stacklevel=2, - ) - return soft_str(s) diff --git a/lib/markupsafe/_speedups.c b/lib/markupsafe/_speedups.c new file mode 100644 index 00000000..3c463fb8 --- /dev/null +++ b/lib/markupsafe/_speedups.c @@ -0,0 +1,320 @@ +#include + +static PyObject* markup; + +static int +init_constants(void) +{ + PyObject *module; + + /* import markup type so that we can mark the return value */ + module = PyImport_ImportModule("markupsafe"); + if (!module) + return 0; + markup = PyObject_GetAttrString(module, "Markup"); + Py_DECREF(module); + + return 1; +} + +#define GET_DELTA(inp, inp_end, delta) \ + while (inp < inp_end) { \ + switch (*inp++) { \ + case '"': \ + case '\'': \ + case '&': \ + delta += 4; \ + break; \ + case '<': \ + case '>': \ + delta += 3; \ + break; \ + } \ + } + +#define DO_ESCAPE(inp, inp_end, outp) \ + { \ + Py_ssize_t ncopy = 0; \ + while (inp < inp_end) { \ + switch (*inp) { \ + case '"': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '4'; \ + *outp++ = ';'; \ + break; \ + case '\'': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '9'; \ + *outp++ = ';'; \ + break; \ + case '&': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'a'; \ + *outp++ = 'm'; \ + *outp++ = 'p'; \ + *outp++ = ';'; \ + break; \ + case '<': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'l'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + case '>': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'g'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + default: \ + ncopy++; \ + } \ + inp++; \ + } \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + } + +static PyObject* +escape_unicode_kind1(PyUnicodeObject *in) +{ + Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in); + Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS1 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, + PyUnicode_IS_ASCII(in) ? 
127 : 255); + if (!out) + return NULL; + + inp = PyUnicode_1BYTE_DATA(in); + outp = PyUnicode_1BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode_kind2(PyUnicodeObject *in) +{ + Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in); + Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS2 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535); + if (!out) + return NULL; + + inp = PyUnicode_2BYTE_DATA(in); + outp = PyUnicode_2BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + + +static PyObject* +escape_unicode_kind4(PyUnicodeObject *in) +{ + Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in); + Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS4 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111); + if (!out) + return NULL; + + inp = PyUnicode_4BYTE_DATA(in); + outp = PyUnicode_4BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode(PyUnicodeObject *in) +{ + if (PyUnicode_READY(in)) + return NULL; + + switch (PyUnicode_KIND(in)) { + case PyUnicode_1BYTE_KIND: + return escape_unicode_kind1(in); + case PyUnicode_2BYTE_KIND: + return escape_unicode_kind2(in); + case PyUnicode_4BYTE_KIND: + return escape_unicode_kind4(in); + } + assert(0); /* shouldn't happen */ + return NULL; +} + +static PyObject* +escape(PyObject *self, PyObject *text) +{ + static PyObject *id_html; + PyObject *s = NULL, *rv = NULL, *html; + + if (id_html == NULL) { + id_html = PyUnicode_InternFromString("__html__"); + if (id_html == NULL) { + return NULL; + } + } + + /* we don't have to escape integers, bools or floats */ + if (PyLong_CheckExact(text) || + PyFloat_CheckExact(text) || PyBool_Check(text) || + text == Py_None) + return PyObject_CallFunctionObjArgs(markup, text, NULL); + + /* if the object has an __html__ method that performs the escaping */ + html = PyObject_GetAttr(text ,id_html); + if (html) { + s = PyObject_CallObject(html, NULL); + Py_DECREF(html); + if (s == NULL) { + return NULL; + } + /* Convert to Markup object */ + rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL); + Py_DECREF(s); + return rv; + } + + /* otherwise make the object unicode if it isn't, then escape */ + PyErr_Clear(); + if (!PyUnicode_Check(text)) { + PyObject *unicode = PyObject_Str(text); + if (!unicode) + return NULL; + s = escape_unicode((PyUnicodeObject*)unicode); + Py_DECREF(unicode); + } + else + s = escape_unicode((PyUnicodeObject*)text); + + /* convert the unicode string into a markup object. */ + rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL); + Py_DECREF(s); + return rv; +} + + +static PyObject* +escape_silent(PyObject *self, PyObject *text) +{ + if (text != Py_None) + return escape(self, text); + return PyObject_CallFunctionObjArgs(markup, NULL); +} + + +static PyObject* +soft_str(PyObject *self, PyObject *s) +{ + if (!PyUnicode_Check(s)) + return PyObject_Str(s); + Py_INCREF(s); + return s; +} + + +static PyMethodDef module_methods[] = { + { + "escape", + (PyCFunction)escape, + METH_O, + "Replace the characters ``&``, ``<``, ``>``, ``'``, and ``\"`` in" + " the string with HTML-safe sequences. 
Use this if you need to display" + " text that might contain such characters in HTML.\n\n" + "If the object has an ``__html__`` method, it is called and the" + " return value is assumed to already be safe for HTML.\n\n" + ":param s: An object to be converted to a string and escaped.\n" + ":return: A :class:`Markup` string with the escaped text.\n" + }, + { + "escape_silent", + (PyCFunction)escape_silent, + METH_O, + "Like :func:`escape` but treats ``None`` as the empty string." + " Useful with optional values, as otherwise you get the string" + " ``'None'`` when the value is ``None``.\n\n" + ">>> escape(None)\n" + "Markup('None')\n" + ">>> escape_silent(None)\n" + "Markup('')\n" + }, + { + "soft_str", + (PyCFunction)soft_str, + METH_O, + "Convert an object to a string if it isn't already. This preserves" + " a :class:`Markup` string rather than converting it back to a basic" + " string, so it will still be marked as safe and won't be escaped" + " again.\n\n" + ">>> value = escape(\"\")\n" + ">>> value\n" + "Markup('<User 1>')\n" + ">>> escape(str(value))\n" + "Markup('&lt;User 1&gt;')\n" + ">>> escape(soft_str(value))\n" + "Markup('<User 1>')\n" + }, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +static struct PyModuleDef module_definition = { + PyModuleDef_HEAD_INIT, + "markupsafe._speedups", + NULL, + -1, + module_methods, + NULL, + NULL, + NULL, + NULL +}; + +PyMODINIT_FUNC +PyInit__speedups(void) +{ + if (!init_constants()) + return NULL; + + return PyModule_Create(&module_definition); +} diff --git a/requirements.txt b/requirements.txt index 1861eeaa..58fe21b6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,11 +18,12 @@ gntp==1.0.3 html5lib==1.1 httpagentparser==1.9.2 idna==3.3 +importlib-metadata==4.11.3 importlib-resources==5.6.0 git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois IPy==1.01 -Mako==1.1.6 -MarkupSafe==2.0.1 +Mako==1.2.0 +MarkupSafe==2.1.1 musicbrainzngs==0.7.1 packaging==21.3 paho-mqtt==1.6.1 From 67baf9a2602b1b0b77455fc1de291f99a589f069 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 20:35:58 -0700 Subject: [PATCH 078/743] Bump urllib3 from 1.26.8 to 1.26.9 (#1688) * Bump urllib3 from 1.26.8 to 1.26.9 Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.26.8 to 1.26.9. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/1.26.9/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.26.8...1.26.9) --- updated-dependencies: - dependency-name: urllib3 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Update urllib3==1.26.9 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/urllib3/_version.py | 2 +- lib/urllib3/connection.py | 6 ++---- lib/urllib3/poolmanager.py | 1 + lib/urllib3/response.py | 5 ++++- lib/urllib3/util/request.py | 5 ++++- lib/urllib3/util/ssl_match_hostname.py | 10 ++++------ requirements.txt | 2 +- 7 files changed, 17 insertions(+), 14 deletions(-) diff --git a/lib/urllib3/_version.py b/lib/urllib3/_version.py index fa8979d7..d905b697 100644 --- a/lib/urllib3/_version.py +++ b/lib/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.8" +__version__ = "1.26.9" diff --git a/lib/urllib3/connection.py b/lib/urllib3/connection.py index 4d92ac6d..7bf395bd 100644 --- a/lib/urllib3/connection.py +++ b/lib/urllib3/connection.py @@ -355,17 +355,15 @@ class HTTPSConnection(HTTPConnection): def connect(self): # Add certificate verification - conn = self._new_conn() + self.sock = conn = self._new_conn() hostname = self.host tls_in_tls = False if self._is_using_tunnel(): if self.tls_in_tls_required: - conn = self._connect_tls_proxy(hostname, conn) + self.sock = conn = self._connect_tls_proxy(hostname, conn) tls_in_tls = True - self.sock = conn - # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() diff --git a/lib/urllib3/poolmanager.py b/lib/urllib3/poolmanager.py index 3a31a285..ca4ec341 100644 --- a/lib/urllib3/poolmanager.py +++ b/lib/urllib3/poolmanager.py @@ -34,6 +34,7 @@ SSL_KEYWORDS = ( "ca_cert_dir", "ssl_context", "key_password", + "server_hostname", ) # All known keyword arguments that could be provided to the pool manager, its diff --git a/lib/urllib3/response.py b/lib/urllib3/response.py index 38693f4f..fdb50ddb 100644 --- a/lib/urllib3/response.py +++ b/lib/urllib3/response.py @@ -8,7 +8,10 @@ from socket import error as SocketError from socket import timeout as SocketTimeout try: - import brotli + try: + import brotlicffi as brotli + except ImportError: + import brotli except ImportError: brotli = None diff --git a/lib/urllib3/util/request.py b/lib/urllib3/util/request.py index 25103383..b574b081 100644 --- a/lib/urllib3/util/request.py +++ b/lib/urllib3/util/request.py @@ -14,7 +14,10 @@ SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) ACCEPT_ENCODING = "gzip,deflate" try: - import brotli as _unused_module_brotli # noqa: F401 + try: + import brotlicffi as _unused_module_brotli # noqa: F401 + except ImportError: + import brotli as _unused_module_brotli # noqa: F401 except ImportError: pass else: diff --git a/lib/urllib3/util/ssl_match_hostname.py b/lib/urllib3/util/ssl_match_hostname.py index a4b4a569..1dd950c4 100644 --- a/lib/urllib3/util/ssl_match_hostname.py +++ b/lib/urllib3/util/ssl_match_hostname.py @@ -112,11 +112,9 @@ def match_hostname(cert, hostname): try: # Divergence from upstream: ipaddress can't handle byte str host_ip = ipaddress.ip_address(_to_unicode(hostname)) - except ValueError: - # Not an IP address (common case) - host_ip = None - except UnicodeError: - # Divergence from upstream: Have to deal with ipaddress not taking + except (UnicodeError, ValueError): + # ValueError: Not an IP address (common case) + # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking # byte strings. 
addresses should be all ascii, so we consider it not # an ipaddress in this case host_ip = None @@ -124,7 +122,7 @@ def match_hostname(cert, hostname): # Divergence from upstream: Make ipaddress library optional if ipaddress is None: host_ip = None - else: + else: # Defensive raise dnsnames = [] san = cert.get("subjectAltName", ()) diff --git a/requirements.txt b/requirements.txt index 58fe21b6..99f8033e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -45,7 +45,7 @@ tempora==5.0.1 tokenize-rt==4.2.1 tzdata==2022.1 tzlocal==4.2 -urllib3==1.26.8 +urllib3==1.26.9 webencodings==0.5.1 websocket-client==1.2.3 xmltodict==0.12.0 From d510e0f60079c7a256d0fac551c381ef2e82a477 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 20:40:13 -0700 Subject: [PATCH 079/743] Bump websocket-client from 1.2.3 to 1.3.2 (#1700) * Bump websocket-client from 1.2.3 to 1.3.2 Bumps [websocket-client](https://github.com/websocket-client/websocket-client) from 1.2.3 to 1.3.2. - [Release notes](https://github.com/websocket-client/websocket-client/releases) - [Changelog](https://github.com/websocket-client/websocket-client/blob/master/ChangeLog) - [Commits](https://github.com/websocket-client/websocket-client/compare/v1.2.3...v1.3.2) --- updated-dependencies: - dependency-name: websocket-client dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update websocket-client==1.3.2 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/websocket/__init__.py | 4 +- lib/websocket/_abnf.py | 47 +++++++++--------- lib/websocket/_app.py | 57 ++++++++++++++-------- lib/websocket/_cookiejar.py | 7 +-- lib/websocket/_core.py | 47 ++++++++---------- lib/websocket/_exceptions.py | 6 +-- lib/websocket/_handshake.py | 13 +++-- lib/websocket/_http.py | 10 ++-- lib/websocket/_logging.py | 7 +-- lib/websocket/_socket.py | 23 ++++----- lib/websocket/_ssl_compat.py | 2 +- lib/websocket/_url.py | 16 +++---- lib/websocket/_utils.py | 2 +- lib/websocket/_wsdump.py | 4 +- lib/websocket/tests/data/header03.txt | 1 + lib/websocket/tests/test_abnf.py | 10 ++-- lib/websocket/tests/test_app.py | 69 +++++++++++++++++++++++---- lib/websocket/tests/test_cookiejar.py | 9 ++-- lib/websocket/tests/test_http.py | 20 ++++---- lib/websocket/tests/test_url.py | 66 ++++++++++++------------- lib/websocket/tests/test_websocket.py | 34 +++++++------ requirements.txt | 2 +- 22 files changed, 251 insertions(+), 205 deletions(-) diff --git a/lib/websocket/__init__.py b/lib/websocket/__init__.py index 05aae2bd..a5a39502 100644 --- a/lib/websocket/__init__.py +++ b/lib/websocket/__init__.py @@ -2,7 +2,7 @@ __init__.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
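The match_hostname hunk above folds the two failure modes of ipaddress.ip_address into one clause, since both mean the same thing at this point: treat the hostname as a DNS name. The pattern in isolation:

    import ipaddress

    def host_ip_or_none(hostname):
        try:
            return ipaddress.ip_address(hostname)
        except (UnicodeError, ValueError):
            # ValueError: not an IP address (the common case).
            # UnicodeError: undecodable byte-string input.
            return None

    print(host_ip_or_none("127.0.0.1"))    # IPv4Address('127.0.0.1')
    print(host_ip_or_none("example.com"))  # None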
@@ -23,4 +23,4 @@ from ._exceptions import * from ._logging import * from ._socket import * -__version__ = "1.2.3" +__version__ = "1.3.2" diff --git a/lib/websocket/_abnf.py b/lib/websocket/_abnf.py index e9909ff6..2e5ad97c 100644 --- a/lib/websocket/_abnf.py +++ b/lib/websocket/_abnf.py @@ -1,12 +1,17 @@ -""" +import array +import os +import struct +import sys -""" +from ._exceptions import * +from ._utils import validate_utf8 +from threading import Lock """ _abnf.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -20,14 +25,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ -import array -import os -import struct -import sys - -from ._exceptions import * -from ._utils import validate_utf8 -from threading import Lock try: # If wsaccel is available, use compiled routines to mask data. @@ -79,6 +76,8 @@ STATUS_POLICY_VIOLATION = 1008 STATUS_MESSAGE_TOO_BIG = 1009 STATUS_INVALID_EXTENSION = 1010 STATUS_UNEXPECTED_CONDITION = 1011 +STATUS_SERVICE_RESTART = 1012 +STATUS_TRY_AGAIN_LATER = 1013 STATUS_BAD_GATEWAY = 1014 STATUS_TLS_HANDSHAKE_ERROR = 1015 @@ -92,6 +91,8 @@ VALID_CLOSE_STATUS = ( STATUS_MESSAGE_TOO_BIG, STATUS_INVALID_EXTENSION, STATUS_UNEXPECTED_CONDITION, + STATUS_SERVICE_RESTART, + STATUS_TRY_AGAIN_LATER, STATUS_BAD_GATEWAY, ) @@ -146,7 +147,7 @@ class ABNF: self.data = data self.get_mask_key = os.urandom - def validate(self, skip_utf8_validation=False): + def validate(self, skip_utf8_validation=False) -> None: """ Validate the ABNF frame. @@ -174,13 +175,13 @@ class ABNF: code = 256 * self.data[0] + self.data[1] if not self._is_valid_close_status(code): - raise WebSocketProtocolException("Invalid close opcode.") + raise WebSocketProtocolException("Invalid close opcode %r", code) @staticmethod - def _is_valid_close_status(code): + def _is_valid_close_status(code: int) -> bool: return code in VALID_CLOSE_STATUS or (3000 <= code < 5000) - def __str__(self): + def __str__(self) -> str: return "fin=" + str(self.fin) \ + " opcode=" + str(self.opcode) \ + " data=" + str(self.data) @@ -206,7 +207,7 @@ class ABNF: # mask must be set if send data from client return ABNF(fin, 0, 0, 0, opcode, 1, data) - def format(self): + def format(self) -> bytes: """ Format this object to string(byte array) to send data to server. """ @@ -251,9 +252,9 @@ class ABNF: Parameters ---------- - mask_key: - 4 byte string. - data: + mask_key: bytes or str + 4 byte mask. + data: bytes or str data to mask/unmask. 
""" if data is None: @@ -286,7 +287,7 @@ class frame_buffer: self.length = None self.mask = None - def has_received_header(self): + def has_received_header(self) -> bool: return self.header is None def recv_header(self): @@ -308,7 +309,7 @@ class frame_buffer: return False return self.header[frame_buffer._HEADER_MASK_INDEX] - def has_received_length(self): + def has_received_length(self) -> bool: return self.length is None def recv_length(self): @@ -323,7 +324,7 @@ class frame_buffer: else: self.length = length_bits - def has_received_mask(self): + def has_received_mask(self) -> bool: return self.mask is None def recv_mask(self): @@ -360,7 +361,7 @@ class frame_buffer: return frame - def recv_strict(self, bufsize): + def recv_strict(self, bufsize: int) -> bytes: shortage = bufsize - sum(map(len, self.recv_buffer)) while shortage > 0: # Limit buffer size that we pass to socket.recv() to avoid diff --git a/lib/websocket/_app.py b/lib/websocket/_app.py index 1afd3d20..da49ec78 100644 --- a/lib/websocket/_app.py +++ b/lib/websocket/_app.py @@ -1,12 +1,18 @@ -""" - -""" +import selectors +import sys +import threading +import time +import traceback +from ._abnf import ABNF +from ._core import WebSocket, getdefaulttimeout +from ._exceptions import * +from . import _logging """ _app.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -20,16 +26,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ -import selectors -import sys -import threading -import time -import traceback -from ._abnf import ABNF -from ._core import WebSocket, getdefaulttimeout -from ._exceptions import * -from . import _logging - __all__ = ["WebSocketApp"] @@ -86,6 +82,20 @@ class SSLDispatcher: return r[0][0] +class WrappedDispatcher: + """ + WrappedDispatcher + """ + def __init__(self, app, ping_timeout, dispatcher): + self.app = app + self.ping_timeout = ping_timeout + self.dispatcher = dispatcher + + def read(self, sock, read_callback, check_callback): + self.dispatcher.read(sock, read_callback) + self.ping_timeout and self.dispatcher.timeout(self.ping_timeout, check_callback) + + class WebSocketApp: """ Higher level of APIs are provided. The interface is like JavaScript WebSocket object. @@ -97,7 +107,8 @@ class WebSocketApp: on_cont_message=None, keep_running=True, get_mask_key=None, cookie=None, subprotocols=None, - on_data=None): + on_data=None, + socket=None): """ WebSocketApp initialization @@ -153,6 +164,8 @@ class WebSocketApp: Cookie value. subprotocols: list List of available sub protocols. Default is None. + socket: socket + Pre-initialized stream socket. """ self.url = url self.header = header if header is not None else [] @@ -172,6 +185,7 @@ class WebSocketApp: self.last_ping_tm = 0 self.last_pong_tm = 0 self.subprotocols = subprotocols + self.prepared_socket = socket def send(self, data, opcode=ABNF.OPCODE_TEXT): """ @@ -258,7 +272,8 @@ class WebSocketApp: Returns ------- teardown: bool - False if caught KeyboardInterrupt, True if other exception was raised during a loop + False if the `WebSocketApp` is closed or caught KeyboardInterrupt, + True if any other exception was raised during a loop. 
""" if ping_timeout is not None and ping_timeout <= 0: @@ -315,9 +330,8 @@ class WebSocketApp: http_proxy_port=http_proxy_port, http_no_proxy=http_no_proxy, http_proxy_auth=http_proxy_auth, subprotocols=self.subprotocols, host=host, origin=origin, suppress_origin=suppress_origin, - proxy_type=proxy_type) - if not dispatcher: - dispatcher = self.create_dispatcher(ping_timeout) + proxy_type=proxy_type, socket=self.prepared_socket) + dispatcher = self.create_dispatcher(ping_timeout, dispatcher) self._callback(self.on_open) @@ -367,6 +381,7 @@ class WebSocketApp: return True dispatcher.read(self.sock.sock, read, check) + return False except (Exception, KeyboardInterrupt, SystemExit) as e: self._callback(self.on_error, e) if isinstance(e, SystemExit): @@ -375,7 +390,9 @@ class WebSocketApp: teardown() return not isinstance(e, KeyboardInterrupt) - def create_dispatcher(self, ping_timeout): + def create_dispatcher(self, ping_timeout, dispatcher=None): + if dispatcher: # If custom dispatcher is set, use WrappedDispatcher + return WrappedDispatcher(self, ping_timeout, dispatcher) timeout = ping_timeout or 10 if self.sock.is_ssl(): return SSLDispatcher(self, timeout) diff --git a/lib/websocket/_cookiejar.py b/lib/websocket/_cookiejar.py index 87853834..5476d1d4 100644 --- a/lib/websocket/_cookiejar.py +++ b/lib/websocket/_cookiejar.py @@ -1,12 +1,10 @@ -""" - -""" +import http.cookies """ _cookiejar.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -20,7 +18,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ -import http.cookies class SimpleCookieJar: diff --git a/lib/websocket/_core.py b/lib/websocket/_core.py index e26c8b11..c36b7800 100644 --- a/lib/websocket/_core.py +++ b/lib/websocket/_core.py @@ -1,27 +1,3 @@ -""" -_core.py -==================================== -WebSocket Python client -""" - -""" -_core.py -websocket - WebSocket client library for Python - -Copyright 2021 engn33r - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" import socket import struct import threading @@ -37,6 +13,25 @@ from ._socket import * from ._ssl_compat import * from ._utils import * +""" +_core.py +websocket - WebSocket client library for Python + +Copyright 2022 engn33r + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + __all__ = ['WebSocket', 'create_connection'] @@ -250,14 +245,14 @@ class WebSocket: options.pop('socket', None)) try: - self.handshake_response = handshake(self.sock, *addrs, **options) + self.handshake_response = handshake(self.sock, url, *addrs, **options) for attempt in range(options.pop('redirect_limit', 3)): if self.handshake_response.status in SUPPORTED_REDIRECT_STATUSES: url = self.handshake_response.headers['location'] self.sock.close() self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), options.pop('socket', None)) - self.handshake_response = handshake(self.sock, *addrs, **options) + self.handshake_response = handshake(self.sock, url, *addrs, **options) self.connected = True except: if self.sock: diff --git a/lib/websocket/_exceptions.py b/lib/websocket/_exceptions.py index b92b1f40..811d5945 100644 --- a/lib/websocket/_exceptions.py +++ b/lib/websocket/_exceptions.py @@ -1,12 +1,8 @@ -""" -Define WebSocket exceptions -""" - """ _exceptions.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/lib/websocket/_handshake.py b/lib/websocket/_handshake.py index f9dabb57..f032c4b5 100644 --- a/lib/websocket/_handshake.py +++ b/lib/websocket/_handshake.py @@ -2,7 +2,7 @@ _handshake.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -47,8 +47,8 @@ class handshake_response: CookieJar.add(headers.get("set-cookie")) -def handshake(sock, hostname, port, resource, **options): - headers, key = _get_handshake_headers(resource, hostname, port, options) +def handshake(sock, url, hostname, port, resource, **options): + headers, key = _get_handshake_headers(resource, url, hostname, port, options) header_str = "\r\n".join(headers) send(sock, header_str) @@ -72,7 +72,7 @@ def _pack_hostname(hostname): return hostname -def _get_handshake_headers(resource, host, port, options): +def _get_handshake_headers(resource, url, host, port, options): headers = [ "GET %s HTTP/1.1" % resource, "Upgrade: websocket" @@ -86,9 +86,14 @@ def _get_handshake_headers(resource, host, port, options): else: headers.append("Host: %s" % hostport) + # scheme indicates whether http or https is used in Origin + # The same approach is used in parse_url of _url.py to set default port + scheme, url = url.split(":", 1) if "suppress_origin" not in options or not options["suppress_origin"]: if "origin" in options and options["origin"] is not None: headers.append("Origin: %s" % options["origin"]) + elif scheme == "wss": + headers.append("Origin: https://%s" % hostport) else: headers.append("Origin: http://%s" % hostport) diff --git a/lib/websocket/_http.py b/lib/websocket/_http.py index 603fa00f..04f32f5a 100644 --- a/lib/websocket/_http.py +++ b/lib/websocket/_http.py @@ -2,7 +2,7 @@ _http.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -186,12 +186,10 @@ def _open_socket(addrinfo_list, sockopt, timeout): except socket.error as error: error.remote_ip = str(address[0]) try: - eConnRefused = (errno.ECONNREFUSED, errno.WSAECONNREFUSED) + eConnRefused = (errno.ECONNREFUSED, errno.WSAECONNREFUSED, errno.ENETUNREACH) except: - eConnRefused = (errno.ECONNREFUSED, ) - if error.errno == errno.EINTR: - continue - elif error.errno in eConnRefused: + eConnRefused = (errno.ECONNREFUSED, errno.ENETUNREACH) + if error.errno in eConnRefused: err = error continue else: diff --git a/lib/websocket/_logging.py b/lib/websocket/_logging.py index 480d43b0..df690dcc 100644 --- a/lib/websocket/_logging.py +++ b/lib/websocket/_logging.py @@ -1,12 +1,10 @@ -""" - -""" +import logging """ _logging.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -20,7 +18,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ -import logging _logger = logging.getLogger('websocket') try: diff --git a/lib/websocket/_socket.py b/lib/websocket/_socket.py index 4d9cc097..54e63997 100644 --- a/lib/websocket/_socket.py +++ b/lib/websocket/_socket.py @@ -1,12 +1,16 @@ -""" +import errno +import selectors +import socket -""" +from ._exceptions import * +from ._ssl_compat import * +from ._utils import * """ _socket.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -20,13 +24,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ -import errno -import selectors -import socket - -from ._exceptions import * -from ._ssl_compat import * -from ._utils import * DEFAULT_SOCKET_OPTION = [(socket.SOL_TCP, socket.TCP_NODELAY, 1)] if hasattr(socket, "SO_KEEPALIVE"): @@ -92,9 +89,7 @@ def recv(sock, bufsize): pass except socket.error as exc: error_code = extract_error_code(exc) - if error_code is None: - raise - if error_code != errno.EAGAIN or error_code != errno.EWOULDBLOCK: + if error_code != errno.EAGAIN and error_code != errno.EWOULDBLOCK: raise sel = selectors.DefaultSelector() @@ -111,6 +106,8 @@ def recv(sock, bufsize): bytes_ = sock.recv(bufsize) else: bytes_ = _recv() + except TimeoutError: + raise WebSocketTimeoutException("Connection timed out") except socket.timeout as e: message = extract_err_message(e) raise WebSocketTimeoutException(message) diff --git a/lib/websocket/_ssl_compat.py b/lib/websocket/_ssl_compat.py index f4af524e..e2278401 100644 --- a/lib/websocket/_ssl_compat.py +++ b/lib/websocket/_ssl_compat.py @@ -2,7 +2,7 @@ _ssl_compat.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
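[Editor's note] The `recv()` hunk above is a De Morgan fix worth spelling out: `code != EAGAIN or code != EWOULDBLOCK` is satisfied by every errno on platforms where the two constants differ, so harmless would-block errors could raise. The corrected `and` raises only for genuine failures:

```python
import errno

def should_raise_old(code):
    # Always True wherever EAGAIN != EWOULDBLOCK: any value differs
    # from at least one of two distinct constants.
    return code != errno.EAGAIN or code != errno.EWOULDBLOCK

def should_raise_new(code):
    # True only when the error is neither "try again later" code.
    return code != errno.EAGAIN and code != errno.EWOULDBLOCK

assert not should_raise_new(errno.EAGAIN)    # retry instead of raising
assert should_raise_new(errno.ECONNRESET)    # real errors still raise
if errno.EAGAIN != errno.EWOULDBLOCK:        # only on platforms where they differ
    assert should_raise_old(errno.EAGAIN)    # the old bug: raised on "try again"
```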
diff --git a/lib/websocket/_url.py b/lib/websocket/_url.py index f2a55019..2d3d2653 100644 --- a/lib/websocket/_url.py +++ b/lib/websocket/_url.py @@ -1,11 +1,14 @@ -""" +import os +import socket +import struct + +from urllib.parse import unquote, urlparse -""" """ _url.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -20,13 +23,6 @@ See the License for the specific language governing permissions and limitations under the License. """ -import os -import socket -import struct - -from urllib.parse import unquote, urlparse - - __all__ = ["parse_url", "get_proxy_info"] diff --git a/lib/websocket/_utils.py b/lib/websocket/_utils.py index 21fc437c..fdcf345b 100644 --- a/lib/websocket/_utils.py +++ b/lib/websocket/_utils.py @@ -2,7 +2,7 @@ _url.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/lib/websocket/_wsdump.py b/lib/websocket/_wsdump.py index 4d15f413..860ac342 100644 --- a/lib/websocket/_wsdump.py +++ b/lib/websocket/_wsdump.py @@ -4,7 +4,7 @@ wsdump.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -64,7 +64,7 @@ class VAction(argparse.Action): def parse_args(): parser = argparse.ArgumentParser(description="WebSocket Simple Dump Tool") parser.add_argument("url", metavar="ws_url", - help="websocket url. ex. ws://echo.websocket.org/") + help="websocket url. ex. ws://echo.websocket.events/") parser.add_argument("-p", "--proxy", help="proxy url. ex. http://127.0.0.1:8080") parser.add_argument("-v", "--verbose", default=0, nargs='?', action=VAction, diff --git a/lib/websocket/tests/data/header03.txt b/lib/websocket/tests/data/header03.txt index 030e13a8..1a81dc70 100644 --- a/lib/websocket/tests/data/header03.txt +++ b/lib/websocket/tests/data/header03.txt @@ -3,5 +3,6 @@ Connection: Upgrade, Keep-Alive Upgrade: WebSocket Sec-WebSocket-Accept: Kxep+hNu9n51529fGidYu7a3wO0= Set-Cookie: Token=ABCDE +Set-Cookie: Token=FGHIJ some_header: something diff --git a/lib/websocket/tests/test_abnf.py b/lib/websocket/tests/test_abnf.py index 7f156dc9..7c9d89d8 100644 --- a/lib/websocket/tests/test_abnf.py +++ b/lib/websocket/tests/test_abnf.py @@ -1,10 +1,14 @@ # -*- coding: utf-8 -*- # +import websocket as ws +from websocket._abnf import * +import unittest + """ test_abnf.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -19,10 +23,6 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import websocket as ws -from websocket._abnf import * -import unittest - class ABNFTest(unittest.TestCase): diff --git a/lib/websocket/tests/test_app.py b/lib/websocket/tests/test_app.py index cd1146b3..ac2a7dd5 100644 --- a/lib/websocket/tests/test_app.py +++ b/lib/websocket/tests/test_app.py @@ -1,10 +1,17 @@ # -*- coding: utf-8 -*- # +import os +import os.path +import threading +import websocket as ws +import ssl +import unittest + """ test_app.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -19,12 +26,6 @@ See the License for the specific language governing permissions and limitations under the License. """ -import os -import os.path -import websocket as ws -import ssl -import unittest - # Skip test to access the internet unless TEST_WITH_INTERNET == 1 TEST_WITH_INTERNET = os.environ.get('TEST_WITH_INTERNET', '0') == '1' # Skip tests relying on local websockets server unless LOCAL_WS_SERVER_PORT != -1 @@ -45,11 +46,13 @@ class WebSocketAppTest(unittest.TestCase): WebSocketAppTest.keep_running_open = WebSocketAppTest.NotSetYet() WebSocketAppTest.keep_running_close = WebSocketAppTest.NotSetYet() WebSocketAppTest.get_mask_key_id = WebSocketAppTest.NotSetYet() + WebSocketAppTest.on_error_data = WebSocketAppTest.NotSetYet() def tearDown(self): WebSocketAppTest.keep_running_open = WebSocketAppTest.NotSetYet() WebSocketAppTest.keep_running_close = WebSocketAppTest.NotSetYet() WebSocketAppTest.get_mask_key_id = WebSocketAppTest.NotSetYet() + WebSocketAppTest.on_error_data = WebSocketAppTest.NotSetYet() @unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled") def testKeepRunning(self): @@ -77,6 +80,54 @@ class WebSocketAppTest(unittest.TestCase): app = ws.WebSocketApp('ws://127.0.0.1:' + LOCAL_WS_SERVER_PORT, on_open=on_open, on_close=on_close, on_message=on_message) app.run_forever() + @unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled") + def testRunForeverDispatcher(self): + """ A WebSocketApp should keep running as long as its self.keep_running + is not False (in the boolean context). + """ + + def on_open(self, *args, **kwargs): + """ Send a message, receive, and send one more + """ + self.send("hello!") + self.recv() + self.send("goodbye!") + + def on_message(wsapp, message): + print(message) + self.close() + + app = ws.WebSocketApp('ws://127.0.0.1:' + LOCAL_WS_SERVER_PORT, on_open=on_open, on_message=on_message) + app.run_forever(dispatcher="Dispatcher") + + @unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled") + def testRunForeverTeardownCleanExit(self): + """ The WebSocketApp.run_forever() method should return `False` when the application ends gracefully. + """ + app = ws.WebSocketApp('ws://127.0.0.1:' + LOCAL_WS_SERVER_PORT) + threading.Timer(interval=0.2, function=app.close).start() + teardown = app.run_forever() + self.assertEqual(teardown, False) + + @unittest.skipUnless(TEST_WITH_LOCAL_SERVER, "Tests using local websocket server are disabled") + def testRunForeverTeardownExceptionalExit(self): + """ The WebSocketApp.run_forever() method should return `True` when the application ends with an exception. + It should also invoke the `on_error` callback before exiting. + """ + + def break_it(): + # Deliberately break the WebSocketApp by closing the inner socket. 
+ app.sock.close() + + def on_error(_, err): + WebSocketAppTest.on_error_data = str(err) + + app = ws.WebSocketApp('ws://127.0.0.1:' + LOCAL_WS_SERVER_PORT, on_error=on_error) + threading.Timer(interval=0.2, function=break_it).start() + teardown = app.run_forever(ping_timeout=0.1) + self.assertEqual(teardown, True) + self.assertTrue(len(WebSocketAppTest.on_error_data) > 0) + @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") def testSockMaskKey(self): """ A WebSocketApp should forward the received mask_key function down @@ -86,7 +137,7 @@ class WebSocketAppTest(unittest.TestCase): def my_mask_key_func(): return "\x00\x00\x00\x00" - app = ws.WebSocketApp('wss://stream.meetup.com/2/rsvps', get_mask_key=my_mask_key_func) + app = ws.WebSocketApp('wss://api-pub.bitfinex.com/ws/1', get_mask_key=my_mask_key_func) # if numpy is installed, this assertion fail # Note: We can't use 'is' for comparing the functions directly, need to use 'id'. @@ -136,7 +187,7 @@ class WebSocketAppTest(unittest.TestCase): def testOpcodeBinary(self): """ Test WebSocketApp binary opcode """ - + # The lack of wss:// in the URL below is on purpose app = ws.WebSocketApp('streaming.vn.teslamotors.com/streaming/') app.run_forever(ping_interval=2, ping_timeout=1, ping_payload="Ping payload") diff --git a/lib/websocket/tests/test_cookiejar.py b/lib/websocket/tests/test_cookiejar.py index 5bf1fcae..559b2e00 100644 --- a/lib/websocket/tests/test_cookiejar.py +++ b/lib/websocket/tests/test_cookiejar.py @@ -1,12 +1,11 @@ -""" - -""" +import unittest +from websocket._cookiejar import SimpleCookieJar """ test_cookiejar.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -20,8 +19,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ -import unittest -from websocket._cookiejar import SimpleCookieJar class CookieJarTest(unittest.TestCase): diff --git a/lib/websocket/tests/test_http.py b/lib/websocket/tests/test_http.py index fda467d7..649e0fe6 100644 --- a/lib/websocket/tests/test_http.py +++ b/lib/websocket/tests/test_http.py @@ -1,10 +1,19 @@ # -*- coding: utf-8 -*- # +import os +import os.path +import websocket as ws +from websocket._http import proxy_info, read_headers, _start_proxied_socket, _tunnel, _get_addrinfo_list, connect +import unittest +import ssl +import websocket +import socket + """ test_http.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -19,15 +28,6 @@ See the License for the specific language governing permissions and limitations under the License. 
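[Editor's note] The two teardown tests above pin down a behavior worth knowing: `run_forever()` now returns `False` after a clean shutdown and `True` after an error-triggered teardown. A usage sketch (the server URL is a placeholder and assumes something is listening there):

```python
import threading
import websocket

app = websocket.WebSocketApp("ws://127.0.0.1:8765")  # placeholder local server
threading.Timer(interval=0.5, function=app.close).start()  # close cleanly soon

had_error = app.run_forever()
print("torn down by an error:", had_error)  # False for a graceful exit
```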
""" -import os -import os.path -import websocket as ws -from websocket._http import proxy_info, read_headers, _start_proxied_socket, _tunnel, _get_addrinfo_list, connect -import unittest -import ssl -import websocket -import socket - try: from python_socks._errors import ProxyError, ProxyTimeoutError, ProxyConnectionError except: diff --git a/lib/websocket/tests/test_url.py b/lib/websocket/tests/test_url.py index ad3a3b1b..7e155fd1 100644 --- a/lib/websocket/tests/test_url.py +++ b/lib/websocket/tests/test_url.py @@ -1,10 +1,14 @@ # -*- coding: utf-8 -*- # +import os +import unittest +from websocket._url import get_proxy_info, parse_url, _is_address_in_network, _is_no_proxy_host + """ test_url.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -19,10 +23,6 @@ See the License for the specific language governing permissions and limitations under the License. """ -import os -import unittest -from websocket._url import get_proxy_info, parse_url, _is_address_in_network, _is_no_proxy_host - class UrlTest(unittest.TestCase): @@ -209,73 +209,73 @@ class ProxyInfoTest(unittest.TestCase): del os.environ["no_proxy"] def testProxyFromArgs(self): - self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost"), ("localhost", 0, None)) - self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_port=3128), + self.assertEqual(get_proxy_info("echo.websocket.events", False, proxy_host="localhost"), ("localhost", 0, None)) + self.assertEqual(get_proxy_info("echo.websocket.events", False, proxy_host="localhost", proxy_port=3128), ("localhost", 3128, None)) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost"), ("localhost", 0, None)) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128), + self.assertEqual(get_proxy_info("echo.websocket.events", True, proxy_host="localhost"), ("localhost", 0, None)) + self.assertEqual(get_proxy_info("echo.websocket.events", True, proxy_host="localhost", proxy_port=3128), ("localhost", 3128, None)) - self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_auth=("a", "b")), + self.assertEqual(get_proxy_info("echo.websocket.events", False, proxy_host="localhost", proxy_auth=("a", "b")), ("localhost", 0, ("a", "b"))) self.assertEqual( - get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")), + get_proxy_info("echo.websocket.events", False, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")), ("localhost", 3128, ("a", "b"))) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_auth=("a", "b")), + self.assertEqual(get_proxy_info("echo.websocket.events", True, proxy_host="localhost", proxy_auth=("a", "b")), ("localhost", 0, ("a", "b"))) self.assertEqual( - get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")), + get_proxy_info("echo.websocket.events", True, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")), ("localhost", 3128, ("a", "b"))) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, + self.assertEqual(get_proxy_info("echo.websocket.events", True, proxy_host="localhost", proxy_port=3128, no_proxy=["example.com"], 
proxy_auth=("a", "b")), ("localhost", 3128, ("a", "b"))) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, - no_proxy=["echo.websocket.org"], proxy_auth=("a", "b")), + self.assertEqual(get_proxy_info("echo.websocket.events", True, proxy_host="localhost", proxy_port=3128, + no_proxy=["echo.websocket.events"], proxy_auth=("a", "b")), (None, 0, None)) def testProxyFromEnv(self): os.environ["http_proxy"] = "http://localhost/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, None)) + self.assertEqual(get_proxy_info("echo.websocket.events", False), ("localhost", None, None)) os.environ["http_proxy"] = "http://localhost:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, None)) + self.assertEqual(get_proxy_info("echo.websocket.events", False), ("localhost", 3128, None)) os.environ["http_proxy"] = "http://localhost/" os.environ["https_proxy"] = "http://localhost2/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, None)) + self.assertEqual(get_proxy_info("echo.websocket.events", False), ("localhost", None, None)) os.environ["http_proxy"] = "http://localhost:3128/" os.environ["https_proxy"] = "http://localhost2:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, None)) + self.assertEqual(get_proxy_info("echo.websocket.events", False), ("localhost", 3128, None)) os.environ["http_proxy"] = "http://localhost/" os.environ["https_proxy"] = "http://localhost2/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", None, None)) + self.assertEqual(get_proxy_info("echo.websocket.events", True), ("localhost2", None, None)) os.environ["http_proxy"] = "http://localhost:3128/" os.environ["https_proxy"] = "http://localhost2:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, None)) + self.assertEqual(get_proxy_info("echo.websocket.events", True), ("localhost2", 3128, None)) os.environ["http_proxy"] = "http://a:b@localhost/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, ("a", "b"))) + self.assertEqual(get_proxy_info("echo.websocket.events", False), ("localhost", None, ("a", "b"))) os.environ["http_proxy"] = "http://a:b@localhost:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, ("a", "b"))) + self.assertEqual(get_proxy_info("echo.websocket.events", False), ("localhost", 3128, ("a", "b"))) os.environ["http_proxy"] = "http://a:b@localhost/" os.environ["https_proxy"] = "http://a:b@localhost2/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, ("a", "b"))) + self.assertEqual(get_proxy_info("echo.websocket.events", False), ("localhost", None, ("a", "b"))) os.environ["http_proxy"] = "http://a:b@localhost:3128/" os.environ["https_proxy"] = "http://a:b@localhost2:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, ("a", "b"))) + self.assertEqual(get_proxy_info("echo.websocket.events", False), ("localhost", 3128, ("a", "b"))) os.environ["http_proxy"] = "http://a:b@localhost/" os.environ["https_proxy"] = "http://a:b@localhost2/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", None, ("a", "b"))) + self.assertEqual(get_proxy_info("echo.websocket.events", True), ("localhost2", None, ("a", "b"))) os.environ["http_proxy"] = "http://a:b@localhost:3128/" os.environ["https_proxy"] = 
"http://a:b@localhost2:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, ("a", "b"))) + self.assertEqual(get_proxy_info("echo.websocket.events", True), ("localhost2", 3128, ("a", "b"))) os.environ["http_proxy"] = "http://john%40example.com:P%40SSWORD@localhost:3128/" os.environ["https_proxy"] = "http://john%40example.com:P%40SSWORD@localhost2:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, ("john@example.com", "P@SSWORD"))) + self.assertEqual(get_proxy_info("echo.websocket.events", True), ("localhost2", 3128, ("john@example.com", "P@SSWORD"))) os.environ["http_proxy"] = "http://a:b@localhost/" os.environ["https_proxy"] = "http://a:b@localhost2/" @@ -283,12 +283,12 @@ class ProxyInfoTest(unittest.TestCase): self.assertEqual(get_proxy_info("example.1.com", True), ("localhost2", None, ("a", "b"))) os.environ["http_proxy"] = "http://a:b@localhost:3128/" os.environ["https_proxy"] = "http://a:b@localhost2:3128/" - os.environ["no_proxy"] = "example1.com,example2.com, echo.websocket.org" - self.assertEqual(get_proxy_info("echo.websocket.org", True), (None, 0, None)) + os.environ["no_proxy"] = "example1.com,example2.com, echo.websocket.events" + self.assertEqual(get_proxy_info("echo.websocket.events", True), (None, 0, None)) os.environ["http_proxy"] = "http://a:b@localhost:3128/" os.environ["https_proxy"] = "http://a:b@localhost2:3128/" - os.environ["no_proxy"] = "example1.com,example2.com, .websocket.org" - self.assertEqual(get_proxy_info("echo.websocket.org", True), (None, 0, None)) + os.environ["no_proxy"] = "example1.com,example2.com, .websocket.events" + self.assertEqual(get_proxy_info("echo.websocket.events", True), (None, 0, None)) os.environ["http_proxy"] = "http://a:b@localhost:3128/" os.environ["https_proxy"] = "http://a:b@localhost2:3128/" diff --git a/lib/websocket/tests/test_websocket.py b/lib/websocket/tests/test_websocket.py index 8b34aa51..ae42ab54 100644 --- a/lib/websocket/tests/test_websocket.py +++ b/lib/websocket/tests/test_websocket.py @@ -1,14 +1,21 @@ # -*- coding: utf-8 -*- # -""" - -""" +import os +import os.path +import socket +import websocket as ws +import unittest +from websocket._handshake import _create_sec_websocket_key, \ + _validate as _validate_header +from websocket._http import read_headers +from websocket._utils import validate_utf8 +from base64 import decodebytes as base64decode """ test_websocket.py websocket - WebSocket client library for Python -Copyright 2021 engn33r +Copyright 2022 engn33r Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -23,18 +30,6 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import os -import os.path -import socket -import websocket as ws -from websocket._handshake import _create_sec_websocket_key, \ - _validate as _validate_header -from websocket._http import read_headers -from websocket._utils import validate_utf8 -from base64 import decodebytes as base64decode - -import unittest - try: import ssl from ssl import SSLError @@ -201,14 +196,16 @@ class WebSocketTest(unittest.TestCase): @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") def testIter(self): count = 2 - for _ in ws.create_connection('wss://stream.meetup.com/2/rsvps'): + s = ws.create_connection('wss://api.bitfinex.com/ws/2') + s.send('{"event": "subscribe", "channel": "ticker"}') + for _ in s: count -= 1 if count == 0: break @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") def testNext(self): - sock = ws.create_connection('wss://stream.meetup.com/2/rsvps') + sock = ws.create_connection('wss://api.bitfinex.com/ws/2') self.assertEqual(str, type(next(sock))) def testInternalRecvStrict(self): @@ -383,6 +380,7 @@ class WebSocketTest(unittest.TestCase): s = ws.create_connection("ws://127.0.0.1:" + LOCAL_WS_SERVER_PORT, headers={"User-Agent": "PythonWebsocketClient"}) self.assertNotEqual(s, None) + self.assertEqual(s.getsubprotocol(), None) s.send("Hello, World") result = s.recv() self.assertEqual(result, "Hello, World") diff --git a/requirements.txt b/requirements.txt index 99f8033e..b91e719f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -47,7 +47,7 @@ tzdata==2022.1 tzlocal==4.2 urllib3==1.26.9 webencodings==0.5.1 -websocket-client==1.2.3 +websocket-client==1.3.2 xmltodict==0.12.0 zipp==3.8.0 From a1fe0b04d7edd0dd106f7f9aa0987a59ac478398 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 20:41:47 -0700 Subject: [PATCH 080/743] Bump bleach from 4.1.0 to 5.0.0 (#1708) * Bump bleach from 4.1.0 to 5.0.0 Bumps [bleach](https://github.com/mozilla/bleach) from 4.1.0 to 5.0.0. - [Release notes](https://github.com/mozilla/bleach/releases) - [Changelog](https://github.com/mozilla/bleach/blob/main/CHANGES) - [Commits](https://github.com/mozilla/bleach/compare/v4.1.0...v5.0.0) --- updated-dependencies: - dependency-name: bleach dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] * Update bleach==5.0.0 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/bleach/__init__.py | 20 ++- .../_vendor/html5lib-1.1.dist-info/LICENSE | 20 +++ .../_vendor/html5lib-1.1.dist-info/REQUESTED | 0 lib/bleach/_vendor/parse.py.SHA256SUM | 1 + lib/bleach/css_sanitizer.py | 104 ++++++++++++++++ lib/bleach/html5lib_shim.py | 115 +++++++++++++----- lib/bleach/linkifier.py | 24 ++-- lib/bleach/sanitizer.py | 108 ++++++---------- lib/bleach/utils.py | 21 ---- requirements.txt | 2 +- 10 files changed, 264 insertions(+), 151 deletions(-) create mode 100644 lib/bleach/_vendor/html5lib-1.1.dist-info/LICENSE create mode 100644 lib/bleach/_vendor/html5lib-1.1.dist-info/REQUESTED create mode 100644 lib/bleach/_vendor/parse.py.SHA256SUM create mode 100644 lib/bleach/css_sanitizer.py delete mode 100644 lib/bleach/utils.py diff --git a/lib/bleach/__init__.py b/lib/bleach/__init__.py index d619fb2c..d271811d 100644 --- a/lib/bleach/__init__.py +++ b/lib/bleach/__init__.py @@ -1,7 +1,3 @@ -# -*- coding: utf-8 -*- - -import packaging.version - from bleach.linkifier import ( DEFAULT_CALLBACKS, Linker, @@ -9,17 +5,15 @@ from bleach.linkifier import ( from bleach.sanitizer import ( ALLOWED_ATTRIBUTES, ALLOWED_PROTOCOLS, - ALLOWED_STYLES, ALLOWED_TAGS, Cleaner, ) # yyyymmdd -__releasedate__ = "20210825" +__releasedate__ = "20220407" # x.y.z or x.y.z.dev0 -- semver -__version__ = "4.1.0" -VERSION = packaging.version.Version(__version__) +__version__ = "5.0.0" __all__ = ["clean", "linkify"] @@ -29,10 +23,10 @@ def clean( text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES, - styles=ALLOWED_STYLES, protocols=ALLOWED_PROTOCOLS, strip=False, strip_comments=True, + css_sanitizer=None, ): """Clean an HTML fragment of malicious content and return it @@ -64,9 +58,6 @@ def clean( :arg dict attributes: allowed attributes; can be a callable, list or dict; defaults to ``bleach.sanitizer.ALLOWED_ATTRIBUTES`` - :arg list styles: allowed list of css styles; defaults to - ``bleach.sanitizer.ALLOWED_STYLES`` - :arg list protocols: allowed list of protocols for links; defaults to ``bleach.sanitizer.ALLOWED_PROTOCOLS`` @@ -74,16 +65,19 @@ def clean( :arg bool strip_comments: whether or not to strip HTML comments + :arg CSSSanitizer css_sanitizer: instance with a "sanitize_css" method for + sanitizing style attribute values and style text; defaults to None + :returns: cleaned text as unicode """ cleaner = Cleaner( tags=tags, attributes=attributes, - styles=styles, protocols=protocols, strip=strip, strip_comments=strip_comments, + css_sanitizer=css_sanitizer, ) return cleaner.clean(text) diff --git a/lib/bleach/_vendor/html5lib-1.1.dist-info/LICENSE b/lib/bleach/_vendor/html5lib-1.1.dist-info/LICENSE new file mode 100644 index 00000000..c87fa7a0 --- /dev/null +++ b/lib/bleach/_vendor/html5lib-1.1.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2006-2013 James Graham and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission 
notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/lib/bleach/_vendor/html5lib-1.1.dist-info/REQUESTED b/lib/bleach/_vendor/html5lib-1.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/lib/bleach/_vendor/parse.py.SHA256SUM b/lib/bleach/_vendor/parse.py.SHA256SUM new file mode 100644 index 00000000..a324b193 --- /dev/null +++ b/lib/bleach/_vendor/parse.py.SHA256SUM @@ -0,0 +1 @@ +46af966e33b6247ae1d57d9459115a3eb46cda9f809c9f14e052abc2fe8dacb2 parse.py diff --git a/lib/bleach/css_sanitizer.py b/lib/bleach/css_sanitizer.py new file mode 100644 index 00000000..489dd6bc --- /dev/null +++ b/lib/bleach/css_sanitizer.py @@ -0,0 +1,104 @@ +import tinycss2 + + +ALLOWED_CSS_PROPERTIES = frozenset( + ( + "azimuth", + "background-color", + "border-bottom-color", + "border-collapse", + "border-color", + "border-left-color", + "border-right-color", + "border-top-color", + "clear", + "color", + "cursor", + "direction", + "display", + "elevation", + "float", + "font", + "font-family", + "font-size", + "font-style", + "font-variant", + "font-weight", + "height", + "letter-spacing", + "line-height", + "overflow", + "pause", + "pause-after", + "pause-before", + "pitch", + "pitch-range", + "richness", + "speak", + "speak-header", + "speak-numeral", + "speak-punctuation", + "speech-rate", + "stress", + "text-align", + "text-decoration", + "text-indent", + "unicode-bidi", + "vertical-align", + "voice-family", + "volume", + "white-space", + "width", + ) +) + + +ALLOWED_SVG_PROPERTIES = frozenset( + ( + "fill", + "fill-opacity", + "fill-rule", + "stroke", + "stroke-width", + "stroke-linecap", + "stroke-linejoin", + "stroke-opacity", + ) +) + + +class CSSSanitizer: + def __init__( + self, + allowed_css_properties=ALLOWED_CSS_PROPERTIES, + allowed_svg_properties=ALLOWED_SVG_PROPERTIES, + ): + self.allowed_css_properties = allowed_css_properties + self.allowed_svg_properties = allowed_svg_properties + + def sanitize_css(self, style): + """Sanitizes css in style tags""" + parsed = tinycss2.parse_declaration_list(style) + + if not parsed: + return "" + + new_tokens = [] + for token in parsed: + if token.type == "declaration": + if ( + token.lower_name in self.allowed_css_properties + or token.lower_name in self.allowed_svg_properties + ): + new_tokens.append(token) + elif token.type in ("comment", "whitespace"): + if new_tokens and new_tokens[-1].type != token.type: + new_tokens.append(token) + + # NOTE(willkg): We currently don't handle AtRule or ParseError and + # so both get silently thrown out + + if not new_tokens: + return "" + + return tinycss2.serialize(new_tokens).strip() diff --git a/lib/bleach/html5lib_shim.py b/lib/bleach/html5lib_shim.py index 3c9c3306..6fc90485 100644 --- a/lib/bleach/html5lib_shim.py +++ b/lib/bleach/html5lib_shim.py @@ -36,6 +36,8 @@ from bleach._vendor.html5lib.filters.base import ( ) # noqa: E402 module level import not at top of file from bleach._vendor.html5lib.filters.sanitizer import ( allowed_protocols, + allowed_css_properties, + 
allowed_svg_properties, ) # noqa: E402 module level import not at top of file from bleach._vendor.html5lib.filters.sanitizer import ( Filter as SanitizerFilter, @@ -68,8 +70,10 @@ TAG_TOKEN_TYPES = { constants.tokenTypes["EndTag"], constants.tokenTypes["EmptyTag"], } -CHARACTERS_TYPE = constants.tokenTypes["Characters"] -PARSEERROR_TYPE = constants.tokenTypes["ParseError"] +TAG_TOKEN_TYPE_START = constants.tokenTypes["StartTag"] +TAG_TOKEN_TYPE_END = constants.tokenTypes["EndTag"] +TAG_TOKEN_TYPE_CHARACTERS = constants.tokenTypes["Characters"] +TAG_TOKEN_TYPE_PARSEERROR = constants.tokenTypes["ParseError"] #: List of valid HTML tags, from WHATWG HTML Living Standard as of 2018-10-17 @@ -190,6 +194,48 @@ HTML_TAGS = [ ] +#: List of block level HTML tags, as per https://github.com/mozilla/bleach/issues/369 +#: from mozilla on 2019.07.11 +#: https://developer.mozilla.org/en-US/docs/Web/HTML/Block-level_elements#Elements +HTML_TAGS_BLOCK_LEVEL = frozenset( + [ + "address", + "article", + "aside", + "blockquote", + "details", + "dialog", + "dd", + "div", + "dl", + "dt", + "fieldset", + "figcaption", + "figure", + "footer", + "form", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "header", + "hgroup", + "hr", + "li", + "main", + "nav", + "ol", + "p", + "pre", + "section", + "table", + "ul", + ] +) + + class InputStreamWithMemory: """Wraps an HTMLInputStream to remember characters since last < @@ -257,17 +303,20 @@ class BleachHTMLTokenizer(HTMLTokenizer): """Tokenizer that doesn't consume character entities""" def __init__(self, consume_entities=False, **kwargs): - super(BleachHTMLTokenizer, self).__init__(**kwargs) + super().__init__(**kwargs) self.consume_entities = consume_entities # Wrap the stream with one that remembers the history self.stream = InputStreamWithMemory(self.stream) + # Remember the last token emitted; needed for block element spacing + self.emitted_last_token = None + def __iter__(self): last_error_token = None - for token in super(BleachHTMLTokenizer, self).__iter__(): + for token in super().__iter__(): if last_error_token is not None: if ( last_error_token["data"] == "invalid-character-in-attribute-name" @@ -309,12 +358,12 @@ class BleachHTMLTokenizer(HTMLTokenizer): # If this is not an allowed tag, then we convert it to # characters and it'll get escaped in the sanitizer. token["data"] = self.stream.get_tag() - token["type"] = CHARACTERS_TYPE + token["type"] = TAG_TOKEN_TYPE_CHARACTERS last_error_token = None yield token - elif token["type"] == PARSEERROR_TYPE: + elif token["type"] == TAG_TOKEN_TYPE_PARSEERROR: # If the token is a parse error, then let the last_error_token # go, and make token the new last_error_token yield last_error_token @@ -329,7 +378,7 @@ class BleachHTMLTokenizer(HTMLTokenizer): # If the token is a ParseError, we hold on to it so we can get the # next token and potentially fix it. - if token["type"] == PARSEERROR_TYPE: + if token["type"] == TAG_TOKEN_TYPE_PARSEERROR: last_error_token = token continue @@ -342,9 +391,7 @@ class BleachHTMLTokenizer(HTMLTokenizer): # If this tokenizer is set to consume entities, then we can let the # superclass do its thing. 
if self.consume_entities: - return super(BleachHTMLTokenizer, self).consumeEntity( - allowedChar, fromAttribute - ) + return super().consumeEntity(allowedChar, fromAttribute) # If this tokenizer is set to not consume entities, then we don't want # to consume and convert them, so this overrides the html5lib tokenizer's @@ -356,7 +403,7 @@ class BleachHTMLTokenizer(HTMLTokenizer): self.currentToken["data"][-1][1] += "&" else: - self.tokenQueue.append({"type": CHARACTERS_TYPE, "data": "&"}) + self.tokenQueue.append({"type": TAG_TOKEN_TYPE_CHARACTERS, "data": "&"}) def tagOpenState(self): # This state marks a < that is either a StartTag, EndTag, EmptyTag, @@ -364,7 +411,7 @@ class BleachHTMLTokenizer(HTMLTokenizer): # we've collected so far and we do that by calling start_tag() on # the input stream wrapper. self.stream.start_tag() - return super(BleachHTMLTokenizer, self).tagOpenState() + return super().tagOpenState() def emitCurrentToken(self): token = self.currentToken @@ -378,9 +425,19 @@ class BleachHTMLTokenizer(HTMLTokenizer): # allowed list, then it gets stripped or escaped. In both of these # cases it gets converted to a Characters token. if self.parser.strip: - # If we're stripping the token, we just throw in an empty - # string token. - new_data = "" + if ( + self.emitted_last_token + and token["type"] == TAG_TOKEN_TYPE_START + and token["name"].lower() in HTML_TAGS_BLOCK_LEVEL + ): + # If this is a block level tag we're stripping, we drop it + # for a newline because that's what a browser would parse + # it as + new_data = "\n" + else: + # For all other things being stripped, we throw in an empty + # string token + new_data = "" else: # If we're escaping the token, we want to escape the exact @@ -390,14 +447,15 @@ class BleachHTMLTokenizer(HTMLTokenizer): # string and use that. new_data = self.stream.get_tag() - new_token = {"type": CHARACTERS_TYPE, "data": new_data} + new_token = {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": new_data} - self.currentToken = new_token + self.currentToken = self.emitted_last_token = new_token self.tokenQueue.append(new_token) self.state = self.dataState return - super(BleachHTMLTokenizer, self).emitCurrentToken() + self.emitted_last_token = self.currentToken + super().emitCurrentToken() class BleachHTMLParser(HTMLParser): @@ -416,7 +474,7 @@ class BleachHTMLParser(HTMLParser): self.tags = [tag.lower() for tag in tags] if tags is not None else None self.strip = strip self.consume_entities = consume_entities - super(BleachHTMLParser, self).__init__(**kwargs) + super().__init__(**kwargs) def _parse( self, stream, innerHTML=False, container="div", scripting=True, **kwargs @@ -514,13 +572,13 @@ def convert_entities(text): def match_entity(stream): """Returns first entity in stream or None if no entity exists - Note: For Bleach purposes, entities must start with a "&" and end with - a ";". This ignoresambiguous character entities that have no ";" at the - end. + Note: For Bleach purposes, entities must start with a "&" and end with a + ";". This ignores ambiguous character entities that have no ";" at the end. 
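[Editor's note] The `emitCurrentToken()` change above alters `strip=True` behavior: a stripped block-level start tag now leaves a newline behind, roughly matching how a browser renders the break, instead of fusing the neighboring text (the issue #369 case cited in the diff). A hedged sketch; exact whitespace can vary with the input:

```python
import bleach

# Disallow all tags; the second block-level <p> strips to "\n" rather than "".
print(repr(bleach.clean("<p>one</p><p>two</p>", tags=[], strip=True)))
# expected output along the lines of: 'one\ntwo'
```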
:arg stream: the character stream - :returns: ``None`` or the entity string without "&" or ";" + :returns: the entity string without "&" or ";" if it's a valid character + entity; ``None`` otherwise """ # Nix the & at the beginning @@ -559,9 +617,11 @@ def match_entity(stream): # Handle character entities while stream and stream[0] not in end_characters: c = stream.pop(0) - if not ENTITIES_TRIE.has_keys_with_prefix(possible_entity): - break possible_entity += c + if not ENTITIES_TRIE.has_keys_with_prefix(possible_entity): + # If it's not a prefix, then it's not an entity and we're + # out + return None if possible_entity and stream and stream[0] == ";": return possible_entity @@ -642,15 +702,14 @@ class BleachHTMLSerializer(HTMLSerializer): in_tag = False after_equals = False - for stoken in super(BleachHTMLSerializer, self).serialize(treewalker, encoding): + for stoken in super().serialize(treewalker, encoding): if in_tag: if stoken == ">": in_tag = False elif after_equals: if stoken != '"': - for part in self.escape_base_amp(stoken): - yield part + yield from self.escape_base_amp(stoken) after_equals = False continue diff --git a/lib/bleach/linkifier.py b/lib/bleach/linkifier.py index 759882e9..68a4042e 100644 --- a/lib/bleach/linkifier.py +++ b/lib/bleach/linkifier.py @@ -2,7 +2,6 @@ import re from bleach import callbacks as linkify_callbacks from bleach import html5lib_shim -from bleach.utils import alphabetize_attributes #: List of default callbacks @@ -155,7 +154,7 @@ class Linker: omit_optional_tags=False, # linkify does not sanitize sanitize=False, - # linkify alphabetizes + # linkify preserves attr order alphabetical_attributes=False, ) @@ -228,7 +227,7 @@ class LinkifyFilter(html5lib_shim.Filter): :arg re email_re: email matching regex """ - super(LinkifyFilter, self).__init__(source) + super().__init__(source) self.callbacks = callbacks or [] self.skip_tags = skip_tags or [] @@ -316,7 +315,6 @@ class LinkifyFilter(html5lib_shim.Filter): else: # Add an "a" tag for the new link _text = attrs.pop("_text", "") - attrs = alphabetize_attributes(attrs) new_tokens.extend( [ {"type": "StartTag", "name": "a", "data": attrs}, @@ -332,8 +330,7 @@ class LinkifyFilter(html5lib_shim.Filter): if end < len(text): new_tokens.append({"type": "Characters", "data": text[end:]}) - for new_token in new_tokens: - yield new_token + yield from new_tokens continue @@ -439,8 +436,6 @@ class LinkifyFilter(html5lib_shim.Filter): new_tokens.append({"type": "Characters", "data": prefix}) _text = attrs.pop("_text", "") - attrs = alphabetize_attributes(attrs) - new_tokens.extend( [ {"type": "StartTag", "name": "a", "data": attrs}, @@ -460,8 +455,7 @@ class LinkifyFilter(html5lib_shim.Filter): if end < len(text): new_tokens.append({"type": "Characters", "data": text[end:]}) - for new_token in new_tokens: - yield new_token + yield from new_tokens continue @@ -493,14 +487,13 @@ class LinkifyFilter(html5lib_shim.Filter): else: new_text = attrs.pop("_text", "") - a_token["data"] = alphabetize_attributes(attrs) + a_token["data"] = attrs if text == new_text: # The callbacks didn't change the text, so we yield the new "a" # token, then whatever else was there, then the end "a" token yield a_token - for mem in token_buffer[1:]: - yield mem + yield from token_buffer[1:] else: # If the callbacks changed the text, then we're going to drop @@ -516,7 +509,7 @@ class LinkifyFilter(html5lib_shim.Filter): token_buffer = [] - for token in super(LinkifyFilter, self).__iter__(): + for token in super().__iter__(): if in_a: # Handle 
the case where we're in an "a" tag--we want to buffer tokens # until we hit an end "a" tag. @@ -524,8 +517,7 @@ class LinkifyFilter(html5lib_shim.Filter): # Add the end tag to the token buffer and then handle them # and yield anything returned token_buffer.append(token) - for new_token in self.handle_a_tag(token_buffer): - yield new_token + yield from self.handle_a_tag(token_buffer) # Clear "a" related state and continue since we've yielded all # the tokens we're going to yield diff --git a/lib/bleach/sanitizer.py b/lib/bleach/sanitizer.py index 89aff1f4..0816cfd0 100644 --- a/lib/bleach/sanitizer.py +++ b/lib/bleach/sanitizer.py @@ -6,7 +6,6 @@ from bleach._vendor.parse import urlparse from xml.sax.saxutils import unescape from bleach import html5lib_shim -from bleach.utils import alphabetize_attributes #: List of allowed tags @@ -33,9 +32,6 @@ ALLOWED_ATTRIBUTES = { "acronym": ["title"], } -#: List of allowed styles -ALLOWED_STYLES = [] - #: List of allowed protocols ALLOWED_PROTOCOLS = ["http", "https", "mailto"] @@ -85,11 +81,11 @@ class Cleaner: self, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES, - styles=ALLOWED_STYLES, protocols=ALLOWED_PROTOCOLS, strip=False, strip_comments=True, filters=None, + css_sanitizer=None, ): """Initializes a Cleaner @@ -99,9 +95,6 @@ class Cleaner: :arg dict attributes: allowed attributes; can be a callable, list or dict; defaults to ``bleach.sanitizer.ALLOWED_ATTRIBUTES`` - :arg list styles: allowed list of css styles; defaults to - ``bleach.sanitizer.ALLOWED_STYLES`` - :arg list protocols: allowed list of protocols for links; defaults to ``bleach.sanitizer.ALLOWED_PROTOCOLS`` @@ -118,14 +111,17 @@ class Cleaner: Using filters changes the output of ``bleach.Cleaner.clean``. Make sure the way the filters change the output are secure. 
+ :arg CSSSanitizer css_sanitizer: instance with a "sanitize_css" method for + sanitizing style attribute values and style text; defaults to None + """ self.tags = tags self.attributes = attributes - self.styles = styles self.protocols = protocols self.strip = strip self.strip_comments = strip_comments self.filters = filters or [] + self.css_sanitizer = css_sanitizer self.parser = html5lib_shim.BleachHTMLParser( tags=self.tags, @@ -143,7 +139,7 @@ class Cleaner: resolve_entities=False, # Bleach has its own sanitizer, so don't use the html5lib one sanitize=False, - # Bleach sanitizer alphabetizes already, so don't use the html5lib one + # clean preserves attr order alphabetical_attributes=False, ) @@ -175,11 +171,10 @@ class Cleaner: attributes=self.attributes, strip_disallowed_elements=self.strip, strip_html_comments=self.strip_comments, + css_sanitizer=self.css_sanitizer, # html5lib-sanitizer things allowed_elements=self.tags, - allowed_css_properties=self.styles, allowed_protocols=self.protocols, - allowed_svg_properties=[], ) # Apply any filters after the BleachSanitizerFilter @@ -242,25 +237,25 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): def __init__( self, source, + allowed_elements=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES, + allowed_protocols=ALLOWED_PROTOCOLS, strip_disallowed_elements=False, strip_html_comments=True, + css_sanitizer=None, **kwargs, ): """Creates a BleachSanitizerFilter instance :arg Treewalker source: stream - :arg list tags: allowed list of tags; defaults to + :arg list allowed_elements: allowed list of tags; defaults to ``bleach.sanitizer.ALLOWED_TAGS`` :arg dict attributes: allowed attributes; can be a callable, list or dict; defaults to ``bleach.sanitizer.ALLOWED_ATTRIBUTES`` - :arg list styles: allowed list of css styles; defaults to - ``bleach.sanitizer.ALLOWED_STYLES`` - - :arg list protocols: allowed list of protocols for links; defaults + :arg list allowed_protocols: allowed list of protocols for links; defaults to ``bleach.sanitizer.ALLOWED_PROTOCOLS`` :arg bool strip_disallowed_elements: whether or not to strip disallowed @@ -268,10 +263,14 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): :arg bool strip_html_comments: whether or not to strip HTML comments + :arg CSSSanitizer css_sanitizer: instance with a "sanitize_css" method for + sanitizing style attribute values and style text; defaults to None + """ self.attr_filter = attribute_filter_factory(attributes) self.strip_disallowed_elements = strip_disallowed_elements self.strip_html_comments = strip_html_comments + self.css_sanitizer = css_sanitizer # filter out html5lib deprecation warnings to use bleach from BleachSanitizerFilter init warnings.filterwarnings( @@ -280,7 +279,12 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): category=DeprecationWarning, module="bleach._vendor.html5lib", ) - return super(BleachSanitizerFilter, self).__init__(source, **kwargs) + return super().__init__( + source, + allowed_elements=allowed_elements, + allowed_protocols=allowed_protocols, + **kwargs, + ) def sanitize_stream(self, token_iterator): for token in token_iterator: @@ -290,8 +294,7 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): continue if isinstance(ret, list): - for subtoken in ret: - yield subtoken + yield from ret else: yield ret @@ -358,10 +361,6 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): return None else: - if "data" in token: - # Alphabetize the attributes before calling .disallowed_token() - # so that the resulting 
string is stable - token["data"] = alphabetize_attributes(token["data"]) return self.disallowed_token(token) elif token_type == "Comment": @@ -547,12 +546,21 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): # If it's a style attribute, sanitize it if namespaced_name == (None, "style"): - val = self.sanitize_css(val) + if self.css_sanitizer: + val = self.css_sanitizer.sanitize_css(val) + else: + # FIXME(willkg): if style is allowed, but no + # css_sanitizer was set up, then this is probably a + # mistake and we should raise an error here + # + # For now, we're going to set the value to "" because + # there was no sanitizer set + val = "" # At this point, we want to keep the attribute, so add it in attrs[namespaced_name] = val - token["data"] = alphabetize_attributes(attrs) + token["data"] = attrs return token @@ -575,7 +583,7 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): if ns is None or ns not in html5lib_shim.prefixes: namespaced_name = name else: - namespaced_name = "%s:%s" % (html5lib_shim.prefixes[ns], name) + namespaced_name = "{}:{}".format(html5lib_shim.prefixes[ns], name) attrs.append( ' %s="%s"' @@ -587,7 +595,7 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): v, ) ) - token["data"] = "<%s%s>" % (token["name"], "".join(attrs)) + token["data"] = "<{}{}>".format(token["name"], "".join(attrs)) else: token["data"] = "<%s>" % token["name"] @@ -599,47 +607,3 @@ class BleachSanitizerFilter(html5lib_shim.SanitizerFilter): del token["name"] return token - - def sanitize_css(self, style): - """Sanitizes css in style tags""" - # Convert entities in the style so that it can be parsed as CSS - style = html5lib_shim.convert_entities(style) - - # Drop any url values before we do anything else - style = re.compile(r"url\s*\(\s*[^\s)]+?\s*\)\s*").sub(" ", style) - - # The gauntlet of sanitization - - # Validate the css in the style tag and if it's not valid, then drop - # the whole thing. - parts = style.split(";") - gauntlet = re.compile( - r"""^( # consider a style attribute value as composed of: -[/:,#%!.\s\w] # a non-newline character -|\w-\w # 3 characters in the form \w-\w -|'[\s\w]+'\s* # a single quoted string of [\s\w]+ with trailing space -|"[\s\w]+" # a double quoted string of [\s\w]+ -|\([\d,%\.\s]+\) # a parenthesized string of one or more digits, commas, periods, ... -)*$""", # ... percent signs, or whitespace e.g. from 'color: hsl(30,100%,50%)' - flags=re.U | re.VERBOSE, - ) - - for part in parts: - if not gauntlet.match(part): - return "" - - if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): - return "" - - clean = [] - for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): - if not value: - continue - - if prop.lower() in self.allowed_css_properties: - clean.append(prop + ": " + value + ";") - - elif prop.lower() in self.allowed_svg_properties: - clean.append(prop + ": " + value + ";") - - return " ".join(clean) diff --git a/lib/bleach/utils.py b/lib/bleach/utils.py deleted file mode 100644 index 6be59f6f..00000000 --- a/lib/bleach/utils.py +++ /dev/null @@ -1,21 +0,0 @@ -from collections import OrderedDict - - -def _attr_key(attr): - """Returns appropriate key for sorting attribute names - - Attribute names are a tuple of ``(namespace, name)`` where namespace can be - ``None`` or a string. These can't be compared in Python 3, so we conver the - ``None`` to an empty string. 
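[Editor's note] With the regex gauntlet in `sanitize_css()` deleted above, style filtering in bleach 5.0 is opt-in through the new tinycss2-backed `CSSSanitizer`; if `style` is an allowed attribute but no sanitizer is supplied, the value is blanked. A usage sketch (the exact serialized output may differ slightly):

```python
from bleach import clean
from bleach.css_sanitizer import CSSSanitizer  # needs the tinycss2 package

css_sanitizer = CSSSanitizer(allowed_css_properties=["color", "font-weight"])
cleaned = clean(
    '<p style="color: red; position: absolute">hi</p>',
    tags=["p"],
    attributes={"p": ["style"]},
    css_sanitizer=css_sanitizer,  # omit this and the style value becomes ""
)
print(cleaned)  # something like: <p style="color: red">hi</p>
```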
- - """ - key = (attr[0][0] or ""), attr[0][1] - return key - - -def alphabetize_attributes(attrs): - """Takes a dict of attributes (or None) and returns them alphabetized""" - if not attrs: - return attrs - - return OrderedDict([(k, v) for k, v in sorted(attrs.items(), key=_attr_key)]) diff --git a/requirements.txt b/requirements.txt index b91e719f..5e22dccb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ backports.csv==1.0.7 backports.functools-lru-cache==1.6.4 backports.zoneinfo==0.2.1 beautifulsoup4==4.10.0 -bleach==4.1.0 +bleach==5.0.0 certifi==2021.10.8 cheroot==8.6.0 cherrypy==18.6.1 From 467ae352f56dc80f72c225d66f455a324d5b5229 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 May 2022 20:46:21 -0700 Subject: [PATCH 081/743] Bump beautifulsoup4 from 4.10.0 to 4.11.1 (#1717) * Bump beautifulsoup4 from 4.10.0 to 4.11.1 Bumps [beautifulsoup4](https://www.crummy.com/software/BeautifulSoup/bs4/) from 4.10.0 to 4.11.1. --- updated-dependencies: - dependency-name: beautifulsoup4 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update beautifulsoup4==4.11.1 * Update soupsieve==2.3.2.post1 * Update requirements.txt Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> [skip ci] --- lib/bs4/__init__.py | 128 +- lib/bs4/builder/__init__.py | 123 +- lib/bs4/builder/_html5lib.py | 10 +- lib/bs4/builder/_htmlparser.py | 11 +- lib/bs4/builder/_lxml.py | 56 +- lib/bs4/dammit.py | 2297 +-------------------- lib/bs4/diagnose.py | 10 +- lib/bs4/element.py | 198 +- lib/bs4/formatter.py | 22 +- lib/bs4/{testing.py => tests/__init__.py} | 397 ++-- lib/bs4/tests/test_builder.py | 29 + lib/bs4/tests/test_builder_registry.py | 136 ++ lib/bs4/tests/test_dammit.py | 371 ++++ lib/bs4/tests/test_docs.py | 38 + lib/bs4/tests/test_element.py | 74 + lib/bs4/tests/test_formatter.py | 113 + lib/bs4/tests/test_html5lib.py | 223 ++ lib/bs4/tests/test_htmlparser.py | 136 ++ lib/bs4/tests/test_lxml.py | 199 ++ lib/bs4/tests/test_navigablestring.py | 144 ++ lib/bs4/tests/test_pageelement.py | 751 +++++++ lib/bs4/tests/test_soup.py | 462 +++++ lib/bs4/tests/test_tag.py | 221 ++ lib/bs4/tests/test_tree.py | 1290 ++++++++++++ lib/soupsieve/__meta__.py | 2 +- lib/soupsieve/css_match.py | 2 +- lib/soupsieve/css_parser.py | 8 +- requirements.txt | 4 +- 28 files changed, 4846 insertions(+), 2609 deletions(-) rename lib/bs4/{testing.py => tests/__init__.py} (78%) create mode 100644 lib/bs4/tests/test_builder.py create mode 100644 lib/bs4/tests/test_builder_registry.py create mode 100644 lib/bs4/tests/test_dammit.py create mode 100644 lib/bs4/tests/test_docs.py create mode 100644 lib/bs4/tests/test_element.py create mode 100644 lib/bs4/tests/test_formatter.py create mode 100644 lib/bs4/tests/test_html5lib.py create mode 100644 lib/bs4/tests/test_htmlparser.py create mode 100644 lib/bs4/tests/test_lxml.py create mode 100644 lib/bs4/tests/test_navigablestring.py create mode 100644 lib/bs4/tests/test_pageelement.py create mode 100644 lib/bs4/tests/test_soup.py create mode 100644 lib/bs4/tests/test_tag.py create mode 100644 lib/bs4/tests/test_tree.py diff --git a/lib/bs4/__init__.py b/lib/bs4/__init__.py index 2a436d34..b3c9feb8 100644 --- a/lib/bs4/__init__.py +++ b/lib/bs4/__init__.py @@ -15,14 +15,13 @@ documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ """ 
__author__ = "Leonard Richardson (leonardr@segfault.org)" -__version__ = "4.10.0" -__copyright__ = "Copyright (c) 2004-2021 Leonard Richardson" +__version__ = "4.11.1" +__copyright__ = "Copyright (c) 2004-2022 Leonard Richardson" # Use of this source code is governed by the MIT license. __license__ = "MIT" __all__ = ['BeautifulSoup'] - from collections import Counter import os import re @@ -35,7 +34,11 @@ import warnings if sys.version_info.major < 3: raise ImportError('You are trying to use a Python 3-specific version of Beautiful Soup under Python 2. This will not work. The final version of Beautiful Soup to support Python 2 was 4.9.3.') -from .builder import builder_registry, ParserRejectedMarkup +from .builder import ( + builder_registry, + ParserRejectedMarkup, + XMLParsedAsHTMLWarning, +) from .dammit import UnicodeDammit from .element import ( CData, @@ -67,7 +70,7 @@ class MarkupResemblesLocatorWarning(UserWarning): on disk. """ - + class BeautifulSoup(Tag): """A data structure representing a parsed HTML or XML document. @@ -207,10 +210,10 @@ class BeautifulSoup(Tag): if old_name in kwargs: warnings.warn( 'The "%s" argument to the BeautifulSoup constructor ' - 'has been renamed to "%s."' % (old_name, new_name)) - value = kwargs[old_name] - del kwargs[old_name] - return value + 'has been renamed to "%s."' % (old_name, new_name), + DeprecationWarning + ) + return kwargs.pop(old_name) return None parse_only = parse_only or deprecated_argument( @@ -305,51 +308,18 @@ class BeautifulSoup(Tag): self._namespaces = dict() self.parse_only = parse_only - self.builder.initialize_soup(self) - if hasattr(markup, 'read'): # It's a file-type object. markup = markup.read() elif len(markup) <= 256 and ( (isinstance(markup, bytes) and not b'<' in markup) or (isinstance(markup, str) and not '<' in markup) ): - # Print out warnings for a couple beginner problems + # Issue warnings for a couple beginner problems # involving passing non-markup to Beautiful Soup. # Beautiful Soup will still parse the input as markup, - # just in case that's what the user really wants. - if (isinstance(markup, str) - and not os.path.supports_unicode_filenames): - possible_filename = markup.encode("utf8") - else: - possible_filename = markup - is_file = False - is_directory = False - try: - is_file = os.path.exists(possible_filename) - if is_file: - is_directory = os.path.isdir(possible_filename) - except Exception as e: - # This is almost certainly a problem involving - # characters not valid in filenames on this - # system. Just let it go. - pass - if is_directory: - warnings.warn( - '"%s" looks like a directory name, not markup. You may' - ' want to open a file found in this directory and pass' - ' the filehandle into Beautiful Soup.' % ( - self._decode_markup(markup) - ), - MarkupResemblesLocatorWarning - ) - elif is_file: - warnings.warn( - '"%s" looks like a filename, not markup. You should' - ' probably open this file and pass the filehandle into' - ' Beautiful Soup.' % self._decode_markup(markup), - MarkupResemblesLocatorWarning - ) - self._check_markup_is_url(markup) + # since that is sometimes the intended behavior. 
+ if not self._markup_is_url(markup): + self._markup_resembles_filename(markup) rejections = [] success = False @@ -358,6 +328,7 @@ class BeautifulSoup(Tag): self.builder.prepare_markup( markup, from_encoding, exclude_encodings=exclude_encodings)): self.reset() + self.builder.initialize_soup(self) try: self._feed() success = True @@ -393,10 +364,10 @@ class BeautifulSoup(Tag): def __getstate__(self): # Frequently a tree builder can't be pickled. d = dict(self.__dict__) - if 'builder' in d and not self.builder.picklable: + if 'builder' in d and d['builder'] is not None and not self.builder.picklable: d['builder'] = None return d - + @classmethod def _decode_markup(cls, markup): """Ensure `markup` is bytes so it's safe to send into warnings.warn. @@ -411,11 +382,13 @@ class BeautifulSoup(Tag): return decoded @classmethod - def _check_markup_is_url(cls, markup): + def _markup_is_url(cls, markup): """Error-handling method to raise a warning if incoming markup looks like a URL. :param markup: A string. + :return: Whether or not the markup resembles a URL + closely enough to justify a warning. """ if isinstance(markup, bytes): space = b' ' @@ -424,20 +397,50 @@ class BeautifulSoup(Tag): space = ' ' cant_start_with = ("http:", "https:") else: - return + return False if any(markup.startswith(prefix) for prefix in cant_start_with): if not space in markup: warnings.warn( - '"%s" looks like a URL. Beautiful Soup is not an' - ' HTTP client. You should probably use an HTTP client like' - ' requests to get the document behind the URL, and feed' - ' that document to Beautiful Soup.' % cls._decode_markup( - markup - ), + 'The input looks more like a URL than markup. You may want to use' + ' an HTTP client like requests to get the document behind' + ' the URL, and feed that document to Beautiful Soup.', MarkupResemblesLocatorWarning ) + return True + return False + @classmethod + def _markup_resembles_filename(cls, markup): + """Error-handling method to raise a warning if incoming markup + resembles a filename. + + :param markup: A bytestring or string. + :return: Whether or not the markup resembles a filename + closely enough to justify a warning. + """ + path_characters = '/\\' + extensions = ['.html', '.htm', '.xml', '.xhtml', '.txt'] + if isinstance(markup, bytes): + path_characters = path_characters.encode("utf8") + extensions = [x.encode('utf8') for x in extensions] + filelike = False + if any(x in markup for x in path_characters): + filelike = True + else: + lower = markup.lower() + if any(lower.endswith(ext) for ext in extensions): + filelike = True + if filelike: + warnings.warn( + 'The input looks more like a filename than markup. You may' + ' want to open this file and pass the filehandle into' + ' Beautiful Soup.', + MarkupResemblesLocatorWarning + ) + return True + return False + def _feed(self): """Internal method that parses previously set markup, creating a large number of Tag and NavigableString objects. @@ -689,7 +692,7 @@ class BeautifulSoup(Tag): return most_recently_popped def handle_starttag(self, name, namespace, nsprefix, attrs, sourceline=None, - sourcepos=None): + sourcepos=None, namespaces=None): """Called by the tree builder when a new tag is encountered. :param name: Name of the tag. @@ -699,6 +702,8 @@ class BeautifulSoup(Tag): source document. :param sourcepos: The character position within `sourceline` where this tag was found. + :param namespaces: A dictionary of all namespace prefix mappings + currently in scope in the document. 
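The two new classmethods above replace the old inline guesswork with explicit heuristics: `_markup_is_url` fires only for `http:`/`https:` prefixes containing no spaces, and `_markup_resembles_filename` looks for path separators or a known extension, and it only runs when the URL check did not fire. A short demonstration of the resulting warnings, as a sketch derived directly from the code in this hunk:

```python
import warnings
from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning

def warns_about(markup):
    # Returns True if parsing `markup` emits a locator warning.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        BeautifulSoup(markup, "html.parser")
    return any(issubclass(w.category, MarkupResemblesLocatorWarning)
               for w in caught)

print(warns_about("https://example.com"))    # True  (URL prefix, no spaces)
print(warns_about("report.html"))            # True  (known extension)
print(warns_about("C:\\temp\\report"))       # True  (path characters)
print(warns_about("plain text, no markup"))  # False
```

Note the checks only run for short inputs (256 characters or fewer) that contain no `<`, so real markup is never second-guessed.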
If this method returns None, the tag was rejected by an active SoupStrainer. You should proceed as if the tag had not occurred @@ -716,7 +721,8 @@ class BeautifulSoup(Tag): tag = self.element_classes.get(Tag, Tag)( self, self.builder, name, namespace, nsprefix, attrs, self.currentTag, self._most_recent_element, - sourceline=sourceline, sourcepos=sourcepos + sourceline=sourceline, sourcepos=sourcepos, + namespaces=namespaces ) if tag is None: return tag @@ -735,7 +741,7 @@ class BeautifulSoup(Tag): #print("End tag: " + name) self.endData() self._popToTag(name, nsprefix) - + def handle_data(self, data): """Called by the tree builder when a chunk of textual data is encountered.""" self.current_data.append(data) @@ -782,7 +788,9 @@ class BeautifulStoneSoup(BeautifulSoup): kwargs['features'] = 'xml' warnings.warn( 'The BeautifulStoneSoup class is deprecated. Instead of using ' - 'it, pass features="xml" into the BeautifulSoup constructor.') + 'it, pass features="xml" into the BeautifulSoup constructor.', + DeprecationWarning + ) super(BeautifulStoneSoup, self).__init__(*args, **kwargs) diff --git a/lib/bs4/builder/__init__.py b/lib/bs4/builder/__init__.py index bd44905e..9f789f3e 100644 --- a/lib/bs4/builder/__init__.py +++ b/lib/bs4/builder/__init__.py @@ -3,10 +3,14 @@ __license__ = "MIT" from collections import defaultdict import itertools +import re +import warnings import sys from bs4.element import ( CharsetMetaAttributeValue, ContentMetaAttributeValue, + RubyParenthesisString, + RubyTextString, Stylesheet, Script, TemplateString, @@ -28,6 +32,12 @@ XML = 'xml' HTML = 'html' HTML_5 = 'html5' +class XMLParsedAsHTMLWarning(UserWarning): + """The warning issued when an HTML parser is used to parse + XML that is not XHTML. + """ + MESSAGE = """It looks like you're parsing an XML document using an HTML parser. If this really is an HTML document (maybe it's XHTML?), you can ignore or filter this warning. If it's XML, you should know that using an XML parser will be more reliable. To parse this document as XML, make sure you have the lxml package installed, and pass the keyword argument `features="xml"` into the BeautifulSoup constructor.""" + class TreeBuilderRegistry(object): """A way of looking up TreeBuilder subclasses by their name or by desired @@ -319,7 +329,7 @@ class TreeBuilder(object): values = value attrs[attr] = values return attrs - + class SAXTreeBuilder(TreeBuilder): """A Beautiful Soup treebuilder that listens for SAX events. @@ -390,17 +400,25 @@ class HTMLTreeBuilder(TreeBuilder): # you need to use it. block_elements = set(["address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "header", "hr", "li", "main", "nav", "noscript", "ol", "output", "p", "pre", "section", "table", "tfoot", "ul", "video"]) - # The HTML standard defines an unusual content model for these tags. - # We represent this by using a string class other than NavigableString - # inside these tags. + # These HTML tags need special treatment so they can be + # represented by a string class other than NavigableString. # - # I made this list by going through the HTML spec + # For some of these tags, it's because the HTML standard defines + # an unusual content model for them. I made this list by going + # through the HTML spec # (https://html.spec.whatwg.org/#metadata-content) and looking for # "metadata content" elements that can contain strings. 
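The new `XMLParsedAsHTMLWarning` gives the long-standing "use an XML parser for XML" advice (carried in its `MESSAGE` class attribute) a filterable warning type. A sketch of how it surfaces and how a caller who knowingly feeds XML-ish markup to an HTML parser can opt out, assuming the vendored build triggers it on an XML declaration the way upstream 4.11 does:

```python
import warnings
from bs4 import BeautifulSoup
from bs4.builder import XMLParsedAsHTMLWarning

doc = "<?xml version='1.0' encoding='utf-8'?><root><item>1</item></root>"

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    BeautifulSoup(doc, "html.parser")
print(any(issubclass(w.category, XMLParsedAsHTMLWarning)
          for w in caught))  # True

# Once you know the input is intentional, silence just this warning:
warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning)
```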
#
+ # The Ruby tags (<ruby> and <rt>) are here despite being normal
+ # "phrasing content" tags, because the content they contain is
+ # qualitatively different from other text in the document, and it
+ # can be useful to be able to distinguish it.
+ #
# TODO: Arguably
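With `rt` and `rp` added to the string containers, ruby annotation text can be told apart from ordinary document text when scraping. A small check using the classes imported in the builder diff above (a sketch, assuming the vendored module matches upstream 4.11):

```python
from bs4 import BeautifulSoup
from bs4.element import RubyParenthesisString, RubyTextString

soup = BeautifulSoup(
    "<ruby>漢 <rp>(</rp><rt>kan</rt><rp>)</rp></ruby>", "html.parser"
)
print(isinstance(soup.rt.string, RubyTextString))         # True
print(isinstance(soup.rp.string, RubyParenthesisString))  # True

# get_text() still returns everything; filter by type to skip the
# annotations and keep only the base text:
base = "".join(
    s for s in soup.strings
    if not isinstance(s, (RubyTextString, RubyParenthesisString))
)
print(base.strip())  # 漢
```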