From 2f6869ed2ad55f50b364e2fb9d63ca5b791ed6cd Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 23 Dec 2022 11:10:39 -0800 Subject: [PATCH 001/113] Update CI badges in README * Ref: badges/shields#8671 [skip ci] --- README.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index fa35aab8..d38f69e1 100644 --- a/README.md +++ b/README.md @@ -57,24 +57,24 @@ Read the [Installation Guides][Installation] for instructions on how to install [badge-release-nightly-last-commit]: https://img.shields.io/github/last-commit/Tautulli/Tautulli/nightly?style=flat-square&color=blue [badge-release-nightly-commits]: https://img.shields.io/github/commits-since/Tautulli/Tautulli/latest/nightly?style=flat-square&color=blue [badge-docker-master]: https://img.shields.io/badge/docker-latest-blue?style=flat-square -[badge-docker-master-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/master?style=flat-square +[badge-docker-master-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-docker.yml?style=flat-square&branch=master [badge-docker-beta]: https://img.shields.io/badge/docker-beta-blue?style=flat-square -[badge-docker-beta-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/beta?style=flat-square +[badge-docker-beta-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-docker.yml?style=flat-square&branch=beta [badge-docker-nightly]: https://img.shields.io/badge/docker-nightly-blue?style=flat-square -[badge-docker-nightly-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/nightly?style=flat-square +[badge-docker-nightly-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-docker.yml?style=flat-square&branch=nightly 
[badge-snap-master]: https://img.shields.io/badge/snap-stable-blue?style=flat-square -[badge-snap-master-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Snap/master?style=flat-square +[badge-snap-master-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-snap.yml?style=flat-square&branch=master [badge-snap-beta]: https://img.shields.io/badge/snap-beta-blue?style=flat-square -[badge-snap-beta-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Snap/beta?style=flat-square +[badge-snap-beta-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-snap.yml?style=flat-square&branch=beta [badge-snap-nightly]: https://img.shields.io/badge/snap-edge-blue?style=flat-square -[badge-snap-nightly-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Snap/nightly?style=flat-square +[badge-snap-nightly-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-snap.yml?style=flat-square&branch=nightly [badge-installer-master-win]: https://img.shields.io/github/v/release/Tautulli/Tautulli?label=windows&style=flat-square [badge-installer-master-macos]: https://img.shields.io/github/v/release/Tautulli/Tautulli?label=macos&style=flat-square -[badge-installer-master-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Installers/master?style=flat-square +[badge-installer-master-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-installers.yml?style=flat-square&branch=master [badge-installer-beta-win]: https://img.shields.io/github/v/release/Tautulli/Tautulli?label=windows&include_prereleases&style=flat-square [badge-installer-beta-macos]: https://img.shields.io/github/v/release/Tautulli/Tautulli?label=macos&include_prereleases&style=flat-square -[badge-installer-beta-ci]: 
https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Installers/beta?style=flat-square -[badge-installer-nightly-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Installers/nightly?style=flat-square +[badge-installer-beta-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-installers.yml?style=flat-square&branch=beta +[badge-installer-nightly-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-installers.yml?style=flat-square&branch=nightly ## Support From 460a463be11386e2777f488f60e6d437672f283c Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 19 Jan 2023 13:54:55 -0800 Subject: [PATCH 002/113] Update notification parameter description for Plex API image paths --- plexpy/common.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/plexpy/common.py b/plexpy/common.py index 33b4cc00..3e56a4c9 100644 --- a/plexpy/common.py +++ b/plexpy/common.py @@ -600,11 +600,11 @@ NOTIFICATION_PARAMETERS = [ {'name': 'Rating Key', 'type': 'int', 'value': 'rating_key', 'description': 'The unique identifier for the movie, episode, or track.'}, {'name': 'Parent Rating Key', 'type': 'int', 'value': 'parent_rating_key', 'description': 'The unique identifier for the season or album.'}, {'name': 'Grandparent Rating Key', 'type': 'int', 'value': 'grandparent_rating_key', 'description': 'The unique identifier for the TV show or artist.'}, - {'name': 'Art', 'type': 'str', 'value': 'art', 'description': 'The Plex background art for the media.'}, - {'name': 'Thumb', 'type': 'str', 'value': 'thumb', 'description': 'The Plex thumbnail for the movie or episode.'}, - {'name': 'Parent Thumb', 'type': 'str', 'value': 'parent_thumb', 'description': 'The Plex thumbnail for the season or album.'}, - {'name': 'Grandparent Thumb', 'type': 'str', 'value': 'grandparent_thumb', 
'description': 'The Plex thumbnail for the TV show or artist.'}, - {'name': 'Poster Thumb', 'type': 'str', 'value': 'poster_thumb', 'description': 'The Plex thumbnail for the poster image.'}, + {'name': 'Art', 'type': 'str', 'value': 'art', 'description': 'The Plex API path to the background art for the media.'}, + {'name': 'Thumb', 'type': 'str', 'value': 'thumb', 'description': 'The Plex API path to the thumbnail for the movie or episode.'}, + {'name': 'Parent Thumb', 'type': 'str', 'value': 'parent_thumb', 'description': 'The Plex API path to the thumbnail for the season or album.'}, + {'name': 'Grandparent Thumb', 'type': 'str', 'value': 'grandparent_thumb', 'description': 'The Plex API path to the thumbnail for the TV show or artist.'}, + {'name': 'Poster Thumb', 'type': 'str', 'value': 'poster_thumb', 'description': 'The Plex API path to the thumbnail for the poster image.'}, {'name': 'Poster Title', 'type': 'str', 'value': 'poster_title', 'description': 'The title for the poster image.'}, {'name': 'Indexes', 'type': 'int', 'value': 'indexes', 'description': 'If the media has video preview thumbnails.', 'example': '0 or 1'}, ] From 548264d51a905bd80d418e0bdb34266b6da32a8c Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 20 Jan 2023 17:05:30 -0800 Subject: [PATCH 003/113] Add prvenance: false to docker/build-push-action Ref: docker/buildx#1533 --- .github/workflows/publish-docker.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml index 125cae51..37885c2f 100644 --- a/.github/workflows/publish-docker.yml +++ b/.github/workflows/publish-docker.yml @@ -87,6 +87,7 @@ jobs: ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }} cache-from: type=local,src=/tmp/.buildx-cache cache-to: type=local,dest=/tmp/.buildx-cache + provenance: false discord: name: Discord Notification From 
1e02c26a9ad1fa3c36040e9a6c3cd58179739d0c Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 20 Jan 2023 17:09:03 -0800 Subject: [PATCH 004/113] Relax workflow action versions --- .github/workflows/publish-docker.yml | 4 ++-- .github/workflows/publish-installers.yml | 4 ++-- .github/workflows/publish-snap.yml | 2 +- .github/workflows/pull-requests.yml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml index 37885c2f..85ce6266 100644 --- a/.github/workflows/publish-docker.yml +++ b/.github/workflows/publish-docker.yml @@ -13,7 +13,7 @@ jobs: if: ${{ !contains(github.event.head_commit.message, '[skip ci]') }} steps: - name: Checkout Code - uses: actions/checkout@v3.2.0 + uses: actions/checkout@v3 - name: Prepare id: prepare @@ -47,7 +47,7 @@ jobs: version: latest - name: Cache Docker Layers - uses: actions/cache@v3.2.0 + uses: actions/cache@v3 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} diff --git a/.github/workflows/publish-installers.yml b/.github/workflows/publish-installers.yml index 3b271a1b..31ad0f81 100644 --- a/.github/workflows/publish-installers.yml +++ b/.github/workflows/publish-installers.yml @@ -24,7 +24,7 @@ jobs: steps: - name: Checkout Code - uses: actions/checkout@v3.2.0 + uses: actions/checkout@v3 - name: Set Release Version id: get_version @@ -52,7 +52,7 @@ jobs: echo $GITHUB_SHA > version.txt - name: Set Up Python - uses: actions/setup-python@v4.4.0 + uses: actions/setup-python@v4 with: python-version: '3.9' cache: pip diff --git a/.github/workflows/publish-snap.yml b/.github/workflows/publish-snap.yml index 7ad8fe95..9df4d2fd 100644 --- a/.github/workflows/publish-snap.yml +++ b/.github/workflows/publish-snap.yml @@ -20,7 +20,7 @@ jobs: - armhf steps: - name: Checkout Code - uses: actions/checkout@v3.2.0 + uses: actions/checkout@v3 - name: Prepare id: prepare diff --git 
a/.github/workflows/pull-requests.yml b/.github/workflows/pull-requests.yml index d7c8e45d..58cb4ee4 100644 --- a/.github/workflows/pull-requests.yml +++ b/.github/workflows/pull-requests.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v3.2.0 + uses: actions/checkout@v3 - name: Comment on Pull Request uses: mshick/add-pr-comment@v2 From 0959f28e957ef119e5abbd083796c650862501c4 Mon Sep 17 00:00:00 2001 From: herby2212 <12448284+herby2212@users.noreply.github.com> Date: Sat, 28 Jan 2023 23:09:11 +0100 Subject: [PATCH 005/113] Add edition detail field for movie info (#1957) * edition addition for movie info * swap position to match plex order --- data/interfaces/default/info.html | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/data/interfaces/default/info.html b/data/interfaces/default/info.html index 6d8b3aaf..a7acb11b 100644 --- a/data/interfaces/default/info.html +++ b/data/interfaces/default/info.html @@ -14,6 +14,7 @@ rating_key Returns the unique identifier for the media item. media_type Returns the type of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'. art Returns the location of the item's artwork title Returns the name of the movie, show, episode, artist, album, or track. +edition_title Returns the edition title of a movie. duration Returns the standard runtime of the media. content_rating Returns the age rating for the media. summary Returns a brief description of the media plot. @@ -390,6 +391,11 @@ DOCUMENTATION :: END Runtime ${data['duration']} % endif + % if data['edition_title']: +
+ Edition ${data['edition_title']} +
+ % endif
% if data['content_rating']: Rated ${data['content_rating']} From c51ee673e83e4ed2fe143f3ef8466575070e31ff Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 28 Jan 2023 13:29:12 -0800 Subject: [PATCH 006/113] Add support for Telegram group topics * Closes #1980 --- plexpy/notifiers.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index fbd48c4b..75f810d8 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -3962,7 +3962,10 @@ class TELEGRAM(Notifier): } def agent_notify(self, subject='', body='', action='', **kwargs): - data = {'chat_id': self.config['chat_id']} + chat_id, *message_thread_id = self.config['chat_id'].split('/') + data = {'chat_id': chat_id} + if message_thread_id: + data['message_thread_id'] = message_thread_id[0] if self.config['incl_subject']: text = subject + '\r\n' + body @@ -4032,7 +4035,8 @@ class TELEGRAM(Notifier): 'description': 'Your Telegram Chat ID, Group ID, Channel ID or @channelusername. ' 'Contact @myidbot' - ' on Telegram to get an ID.', + ' on Telegram to get an ID. 
' + 'For a group topic, append /topicID to the group ID.', 'input_type': 'text' }, {'label': 'Include Subject Line', From b0a55df8620e37795e6a5e202fdd8b10e4d5338a Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 28 Jan 2023 13:43:42 -0800 Subject: [PATCH 007/113] Add anidb_id and anidb_url notification parameters * Closes #1973 --- plexpy/common.py | 2 ++ plexpy/notification_handler.py | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/plexpy/common.py b/plexpy/common.py index 3e56a4c9..ad91ee87 100644 --- a/plexpy/common.py +++ b/plexpy/common.py @@ -553,6 +553,8 @@ NOTIFICATION_PARAMETERS = [ {'name': 'TVmaze URL', 'type': 'str', 'value': 'tvmaze_url', 'description': 'The TVmaze URL for the TV show.'}, {'name': 'MusicBrainz ID', 'type': 'str', 'value': 'musicbrainz_id', 'description': 'The MusicBrainz ID for the artist, album, or track.', 'example': 'e.g. b670dfcf-9824-4309-a57e-03595aaba286'}, {'name': 'MusicBrainz URL', 'type': 'str', 'value': 'musicbrainz_url', 'description': 'The MusicBrainz URL for the artist, album, or track.'}, + {'name': 'AniDB ID', 'type': 'str', 'value': 'anidb_id', 'description': 'The AniDB ID for the Anime', 'example': 'e.g. 
69', 'help_text': 'TV show library agent must be HAMA'}, + {'name': 'AniDB URL', 'type': 'str', 'value': 'anidb_url', 'description': 'The AniDB URL for the Anime', 'help_text': 'TV show library agent must be HAMA'}, {'name': 'Last.fm URL', 'type': 'str', 'value': 'lastfm_url', 'description': 'The Last.fm URL for the album.', 'help_text': 'Music library agent must be Last.fm'}, {'name': 'Trakt.tv URL', 'type': 'str', 'value': 'trakt_url', 'description': 'The trakt.tv URL for the movie or TV show.'}, {'name': 'Container', 'type': 'str', 'value': 'container', 'description': 'The media container of the original media.'}, diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 57f5ff83..6b55c5b7 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -715,6 +715,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m else: notify_params['musicbrainz_url'] = 'https://musicbrainz.org/track/' + notify_params['musicbrainz_id'] + if 'hama://' in notify_params['guid']: + notify_params['anidb_id'] = notify_params['guid'].split('hama://')[1].split('/')[0].split('?')[0].split('-')[1] + notify_params['anidb_url'] = 'https://anidb.net/anime/' + notify_params['anidb_id'] + # Get TheMovieDB info (for movies and tv only) if plexpy.CONFIG.THEMOVIEDB_LOOKUP and notify_params['media_type'] in ('movie', 'show', 'season', 'episode'): if notify_params.get('themoviedb_id'): @@ -1142,6 +1146,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m 'tvmaze_url': notify_params['tvmaze_url'], 'musicbrainz_id': notify_params['musicbrainz_id'], 'musicbrainz_url': notify_params['musicbrainz_url'], + 'anidb_id': notify_params['anidb_id'], + 'anidb_url': notify_params['anidb_url'], 'lastfm_url': notify_params['lastfm_url'], 'trakt_url': notify_params['trakt_url'], 'container': notify_params['container'], From b6ff45138f29763b92fa59646c7499cfd3d28c41 Mon Sep 17 00:00:00 2001 From: 
JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 28 Jan 2023 14:04:45 -0800 Subject: [PATCH 008/113] Add section_id and user_id parameters to get_home_stats API command * Closes #1944 --- plexpy/datafactory.py | 49 ++++++++++++++++++++++++------------------- plexpy/webserve.py | 9 ++++++-- 2 files changed, 35 insertions(+), 23 deletions(-) diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index 700c14c6..cf55a2c0 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -349,7 +349,8 @@ class DataFactory(object): return dict def get_home_stats(self, grouping=None, time_range=30, stats_type='plays', - stats_start=0, stats_count=10, stat_id='', stats_cards=None): + stats_start=0, stats_count=10, stat_id='', stats_cards=None, + section_id=None, user_id=None): monitor_db = database.MonitorDatabase() time_range = helpers.cast_to_int(time_range) @@ -364,6 +365,12 @@ class DataFactory(object): if stats_cards is None: stats_cards = plexpy.CONFIG.HOME_STATS_CARDS + where_id = '' + if section_id: + where_id += 'AND session_history.section_id = %s ' % section_id + if user_id: + where_id += 'AND session_history.user_id = %s ' % user_id + movie_watched_percent = plexpy.CONFIG.MOVIE_WATCHED_PERCENT tv_watched_percent = plexpy.CONFIG.TV_WATCHED_PERCENT music_watched_percent = plexpy.CONFIG.MUSIC_WATCHED_PERCENT @@ -385,12 +392,12 @@ class DataFactory(object): ' AS d ' \ ' FROM session_history ' \ ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "movie" ' \ + ' AND session_history.media_type = "movie" %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'GROUP BY shm.full_title, shm.year ' \ 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: 
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e) @@ -438,12 +445,12 @@ class DataFactory(object): ' AS d ' \ ' FROM session_history ' \ ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "movie" ' \ + ' AND session_history.media_type = "movie" %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'GROUP BY shm.full_title, shm.year ' \ 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_movies: %s." % e) @@ -490,12 +497,12 @@ class DataFactory(object): ' AS d ' \ ' FROM session_history ' \ ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "episode" ' \ + ' AND session_history.media_type = "episode" %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'GROUP BY shm.grandparent_title ' \ 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." 
% e) @@ -545,12 +552,12 @@ class DataFactory(object): ' AS d ' \ ' FROM session_history ' \ ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "episode" ' \ + ' AND session_history.media_type = "episode" %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'GROUP BY shm.grandparent_title ' \ 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." % e) @@ -596,12 +603,12 @@ class DataFactory(object): ' AS d ' \ ' FROM session_history ' \ ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "track" ' \ + ' AND session_history.media_type = "track" %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'GROUP BY shm.original_title, shm.grandparent_title ' \ 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." 
% e) @@ -650,12 +657,12 @@ class DataFactory(object): ' AS d ' \ ' FROM session_history ' \ ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "track" ' \ + ' AND session_history.media_type = "track" %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'GROUP BY shm.original_title, shm.grandparent_title ' \ 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." % e) @@ -706,14 +713,14 @@ class DataFactory(object): ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ ' AS d ' \ ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ + ' WHERE session_history.stopped >= %s %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'LEFT OUTER JOIN (SELECT * FROM library_sections WHERE deleted_section = 0) ' \ ' AS ls ON sh.section_id = ls.section_id ' \ 'GROUP BY sh.section_id ' \ 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_libraries: %s." 
% e) @@ -793,13 +800,13 @@ class DataFactory(object): ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ ' AS d ' \ ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ + ' WHERE session_history.stopped >= %s %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \ 'GROUP BY sh.user_id ' \ 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." % e) @@ -862,11 +869,11 @@ class DataFactory(object): ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ ' AS d ' \ ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ + ' WHERE session_history.stopped >= %s %s ' \ ' GROUP BY %s) AS sh ' \ 'GROUP BY sh.platform ' \ 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start) + 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." 
% e) @@ -918,7 +925,7 @@ class DataFactory(object): 'FROM (SELECT *, MAX(id) FROM session_history ' \ ' WHERE session_history.stopped >= %s ' \ ' AND (session_history.media_type = "movie" ' \ - ' OR session_history.media_type = "episode") ' \ + ' OR session_history.media_type = "episode") %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \ @@ -926,7 +933,7 @@ class DataFactory(object): ' OR sh.media_type == "episode" AND percent_complete >= %s ' \ 'GROUP BY sh.id ' \ 'ORDER BY last_watch DESC ' \ - 'LIMIT %s OFFSET %s' % (timestamp, group_by, movie_watched_percent, tv_watched_percent, + 'LIMIT %s OFFSET %s' % (timestamp, where_id, group_by, movie_watched_percent, tv_watched_percent, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 76c44243..8ad3e664 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -6185,7 +6185,8 @@ class WebInterface(object): @requireAuth(member_of("admin")) @addtoapi() def get_home_stats(self, grouping=None, time_range=30, stats_type='plays', - stats_start=0, stats_count=10, stat_id='', **kwargs): + stats_start=0, stats_count=10, stat_id='', + section_id=None, user_id=None, **kwargs): """ Get the homepage watch statistics. 
``` @@ -6201,6 +6202,8 @@ class WebInterface(object): stat_id (str): A single stat to return, 'top_movies', 'popular_movies', 'top_tv', 'popular_tv', 'top_music', 'popular_music', 'top_libraries', 'top_users', 'top_platforms', 'last_watched', 'most_concurrent' + section_id (int): The id of the Plex library section + user_id (int): The id of the Plex user Returns: json: @@ -6282,7 +6285,9 @@ class WebInterface(object): stats_type=stats_type, stats_start=stats_start, stats_count=stats_count, - stat_id=stat_id) + stat_id=stat_id, + section_id=section_id, + user_id=user_id) if result: return result From 5ab9315f1601850c04bff5858429799da0148ee8 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 14 Feb 2023 18:19:35 -0800 Subject: [PATCH 009/113] Upload notification images directly to Discord --- plexpy/notifiers.py | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index 75f810d8..3e721515 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -892,6 +892,15 @@ class PrettyMetadata(object): parameters[''] = '' return parameters + def get_image(self): + result = pmsconnect.PmsConnect().get_image(img=self.parameters.get('poster_thumb', '')) + if result and result[0]: + poster_content = result[0] + poster_filename = 'poster_{}.png'.format(self.parameters['rating_key']) + return (poster_filename, poster_content, 'image/png') + + logger.error("Tautulli Notifiers :: Unable to retrieve image for notification.") + class Notifier(object): NAME = '' @@ -1117,10 +1126,16 @@ class DISCORD(Notifier): if self.config['tts']: data['tts'] = True + files = {} + if self.config['incl_card'] and kwargs.get('parameters', {}).get('media_type'): # Grab formatted metadata pretty_metadata = PrettyMetadata(kwargs['parameters']) + image = pretty_metadata.get_image() + if image: + files = {'files[0]': image} + if pretty_metadata.media_type == 'movie': 
provider = self.config['movie_provider'] elif pretty_metadata.media_type in ('show', 'season', 'episode'): @@ -1150,9 +1165,9 @@ class DISCORD(Notifier): attachment['color'] = helpers.hex_to_int(hex) if self.config['incl_thumbnail']: - attachment['thumbnail'] = {'url': poster_url} + attachment['thumbnail'] = {'url': 'attachment://{}'.format(image[0]) if image else poster_url} else: - attachment['image'] = {'url': poster_url} + attachment['image'] = {'url': 'attachment://{}'.format(image[0]) if image else poster_url} if self.config['incl_description']: attachment['description'] = description[:2045] + (description[2045:] and '...') @@ -1172,10 +1187,13 @@ class DISCORD(Notifier): data['embeds'] = [attachment] - headers = {'Content-type': 'application/json'} params = {'wait': True} - return self.make_request(self.config['hook'], params=params, headers=headers, json=data) + if files: + files['payload_json'] = (None, json.dumps(data), 'application/json') + return self.make_request(self.config['hook'], params=params, files=files) + else: + return self.make_request(self.config['hook'], params=params, json=data) def _return_config_options(self): config_option = [{'label': 'Discord Webhook URL', @@ -1217,10 +1235,7 @@ class DISCORD(Notifier): {'label': 'Include Rich Metadata Info', 'value': self.config['incl_card'], 'name': 'discord_incl_card', - 'description': 'Include an info card with a poster and metadata with the notifications.
' - 'Note: Image Hosting ' - 'must be enabled under the 3rd Party APIs settings tab.', + 'description': 'Include an info card with a poster and metadata with the notifications.', 'input_type': 'checkbox' }, {'label': 'Include Summary', From e263f0b8a3a40f92820a6a230f5a377eef817a5c Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 14 Feb 2023 18:21:11 -0800 Subject: [PATCH 010/113] Refactor notification image upload code --- plexpy/notifiers.py | 57 +++++++++++---------------------------------- 1 file changed, 13 insertions(+), 44 deletions(-) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index 3e721515..e3ef6862 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -1807,19 +1807,12 @@ class GROUPME(Notifier): if self.config['incl_poster'] and kwargs.get('parameters'): pretty_metadata = PrettyMetadata(kwargs.get('parameters')) - # Retrieve the poster from Plex - result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb','')) - if result and result[0]: - poster_content = result[0] - else: - poster_content = '' - logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME)) - - if poster_content: + image = pretty_metadata.get_image() + if image: headers = {'X-Access-Token': self.config['access_token'], 'Content-Type': 'image/png'} - r = requests.post('https://image.groupme.com/pictures', headers=headers, data=poster_content) + r = requests.post('https://image.groupme.com/pictures', headers=headers, data=image[1]) if r.status_code == 200: logger.info("Tautulli Notifiers :: {name} poster sent.".format(name=self.NAME)) @@ -3042,18 +3035,10 @@ class PUSHBULLET(Notifier): # Grab formatted metadata pretty_metadata = PrettyMetadata(kwargs['parameters']) - # Retrieve the poster from Plex - result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb', '')) - if result and result[0]: - poster_content = 
result[0] - else: - poster_content = '' - logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME)) - - if poster_content: - poster_filename = 'poster_{}.png'.format(pretty_metadata.parameters['rating_key']) - file_json = {'file_name': poster_filename, 'file_type': 'image/png'} - files = {'file': (poster_filename, poster_content, 'image/png')} + image = pretty_metadata.get_image() + if image: + file_json = {'file_name': image[0], 'file_type': image[2]} + files = {'file': image} r = requests.post('https://api.pushbullet.com/v2/upload-request', headers=headers, json=file_json) @@ -3199,17 +3184,9 @@ class PUSHOVER(Notifier): # Grab formatted metadata pretty_metadata = PrettyMetadata(kwargs['parameters']) - # Retrieve the poster from Plex - result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb', '')) - if result and result[0]: - poster_content = result[0] - else: - poster_content = '' - logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME)) - - if poster_content: - poster_filename = 'poster_{}.png'.format(pretty_metadata.parameters['rating_key']) - files = {'attachment': (poster_filename, poster_content, 'image/png')} + image = pretty_metadata.get_image() + if image: + files = {'attachment': image} headers = {} return self.make_request('https://api.pushover.net/1/messages.json', headers=headers, data=data, files=files) @@ -3994,17 +3971,9 @@ class TELEGRAM(Notifier): # Grab formatted metadata pretty_metadata = PrettyMetadata(kwargs['parameters']) - # Retrieve the poster from Plex - result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb', '')) - if result and result[0]: - poster_content = result[0] - else: - poster_content = '' - logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME)) - - if poster_content: - poster_filename = 
'poster_{}.png'.format(pretty_metadata.parameters['rating_key']) - files = {'photo': (poster_filename, poster_content, 'image/png')} + image = pretty_metadata.get_image() + if image: + files = {'photo': image} if len(text) > 1024: data['disable_notification'] = True From 0db9548995303d5669e06d5111ed074a486583dd Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 20 Jan 2023 17:05:30 -0800 Subject: [PATCH 011/113] Revert "Add prvenance: false to docker/build-push-action" This reverts commit 548264d51a905bd80d418e0bdb34266b6da32a8c. --- .github/workflows/publish-docker.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml index 85ce6266..8cb8267f 100644 --- a/.github/workflows/publish-docker.yml +++ b/.github/workflows/publish-docker.yml @@ -87,7 +87,6 @@ jobs: ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }} cache-from: type=local,src=/tmp/.buildx-cache cache-to: type=local,dest=/tmp/.buildx-cache - provenance: false discord: name: Discord Notification From a8539b29272995e0cf6ade177923c26119838dc3 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 16 Feb 2023 11:29:18 -0800 Subject: [PATCH 012/113] Retrieve intro/credits markers for metadata details --- plexpy/pmsconnect.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index 11142873..3a7a2450 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -140,7 +140,7 @@ class PmsConnect(object): Output: array """ - uri = '/library/metadata/' + rating_key + uri = '/library/metadata/' + rating_key + '?includeMarkers=1' request = self.request_handler.make_request(uri=uri, request_type='GET', output_format=output_format) @@ -745,6 +745,7 @@ class PmsConnect(object): labels = [] collections = [] guids = [] + markers = [] if 
metadata_main.getElementsByTagName('Director'): for director in metadata_main.getElementsByTagName('Director'): @@ -774,6 +775,16 @@ class PmsConnect(object): for guid in metadata_main.getElementsByTagName('Guid'): guids.append(helpers.get_xml_attr(guid, 'id')) + if metadata_main.getElementsByTagName('Marker'): + for marker in metadata_main.getElementsByTagName('Marker'): + markers.append({ + 'id': helpers.get_xml_attr(marker, 'id'), + 'type': helpers.get_xml_attr(marker, 'type'), + 'start_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'startTimeOffset')), + 'end_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'endTimeOffset')), + 'final': helpers.bool_true(helpers.get_xml_attr(marker, 'final')) + }) + if metadata_type == 'movie': metadata = {'media_type': metadata_type, 'section_id': section_id, @@ -821,6 +832,7 @@ class PmsConnect(object): 'labels': labels, 'collections': collections, 'guids': guids, + 'markers': markers, 'parent_guids': [], 'grandparent_guids': [], 'full_title': helpers.get_xml_attr(metadata_main, 'title'), @@ -880,6 +892,7 @@ class PmsConnect(object): 'labels': labels, 'collections': collections, 'guids': guids, + 'markers': markers, 'parent_guids': [], 'grandparent_guids': [], 'full_title': helpers.get_xml_attr(metadata_main, 'title'), @@ -942,6 +955,7 @@ class PmsConnect(object): 'labels': show_details.get('labels', []), 'collections': show_details.get('collections', []), 'guids': guids, + 'markers': markers, 'parent_guids': show_details.get('guids', []), 'grandparent_guids': [], 'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'), @@ -1021,6 +1035,7 @@ class PmsConnect(object): 'labels': show_details.get('labels', []), 'collections': show_details.get('collections', []), 'guids': guids, + 'markers': markers, 'parent_guids': season_details.get('guids', []), 'grandparent_guids': show_details.get('guids', []), 'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 
'grandparentTitle'), @@ -1076,6 +1091,7 @@ class PmsConnect(object): 'labels': labels, 'collections': collections, 'guids': guids, + 'markers': markers, 'parent_guids': [], 'grandparent_guids': [], 'full_title': helpers.get_xml_attr(metadata_main, 'title'), @@ -1132,6 +1148,7 @@ class PmsConnect(object): 'labels': labels, 'collections': collections, 'guids': guids, + 'markers': markers, 'parent_guids': artist_details.get('guids', []), 'grandparent_guids': [], 'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'), @@ -1191,6 +1208,7 @@ class PmsConnect(object): 'labels': album_details.get('labels', []), 'collections': album_details.get('collections', []), 'guids': guids, + 'markers': markers, 'parent_guids': album_details.get('guids', []), 'grandparent_guids': album_details.get('parent_guids', []), 'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'title'), @@ -1246,6 +1264,7 @@ class PmsConnect(object): 'labels': labels, 'collections': collections, 'guids': guids, + 'markers': markers, 'parent_guids': [], 'grandparent_guids': [], 'full_title': helpers.get_xml_attr(metadata_main, 'title'), @@ -1302,6 +1321,7 @@ class PmsConnect(object): 'labels': photo_album_details.get('labels', []), 'collections': photo_album_details.get('collections', []), 'guids': [], + 'markers': markers, 'parent_guids': photo_album_details.get('guids', []), 'grandparent_guids': [], 'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle') or library_name, @@ -1361,6 +1381,7 @@ class PmsConnect(object): 'labels': labels, 'collections': collections, 'guids': guids, + 'markers': markers, 'parent_guids': [], 'grandparent_guids': [], 'full_title': helpers.get_xml_attr(metadata_main, 'title'), @@ -1435,6 +1456,7 @@ class PmsConnect(object): 'labels': labels, 'collections': collections, 'guids': guids, + 'markers': markers, 'parent_guids': [], 'grandparent_guids': [], 'full_title': helpers.get_xml_attr(metadata_main, 'title'), From 
9a152932ee8c81341123749d61de8b1987390a0d Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 16 Feb 2023 11:29:47 -0800 Subject: [PATCH 013/113] Monitor stream intro/credits marker activity --- plexpy/__init__.py | 15 +- plexpy/activity_handler.py | 380 ++++++++++++++++++----------------- plexpy/activity_processor.py | 12 +- 3 files changed, 226 insertions(+), 181 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index d27cd396..003259bc 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -656,7 +656,8 @@ def dbcheck(): 'synced_version INTEGER, synced_version_profile TEXT, ' 'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, ' 'secure INTEGER, relayed INTEGER, ' - 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, ' + 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, ' + 'watched INTEGER DEFAULT 0, intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, ' 'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, ' 'rating_key_websocket TEXT)' ) @@ -1401,6 +1402,18 @@ def dbcheck(): 'ALTER TABLE sessions ADD COLUMN stream_subtitle_forced INTEGER' ) + # Upgrade sessions table from earlier versions + try: + c_db.execute('SELECT intro FROM sessions') + except sqlite3.OperationalError: + logger.debug(u"Altering database. 
Updating database table sessions.") + c_db.execute( + 'ALTER TABLE sessions ADD COLUMN intro INTEGER DEFAULT 0' + ) + c_db.execute( + 'ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0' + ) + # Upgrade session_history table from earlier versions try: c_db.execute('SELECT reference_id FROM session_history') diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py index 07d0f8e3..a89ccb98 100644 --- a/plexpy/activity_handler.py +++ b/plexpy/activity_handler.py @@ -51,7 +51,11 @@ RECENTLY_ADDED_QUEUE = {} class ActivityHandler(object): def __init__(self, timeline): + self.ap = activity_processor.ActivityProcessor() self.timeline = timeline + self.db_session = None + self.session = None + self.metadata = None def is_valid_session(self): if 'sessionKey' in self.timeline: @@ -72,15 +76,18 @@ class ActivityHandler(object): return None + def get_db_session(self): + # Retrieve the session data from our temp table + self.db_session = self.ap.get_session_by_key(session_key=self.get_session_key()) + def get_metadata(self, skip_cache=False): - cache_key = None if skip_cache else self.get_session_key() - pms_connect = pmsconnect.PmsConnect() - metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key) + if self.metadata is None: + cache_key = None if skip_cache else self.get_session_key() + pms_connect = pmsconnect.PmsConnect() + metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key) - if metadata: - return metadata - - return None + if metadata: + self.metadata = metadata def get_live_session(self, skip_cache=False): pms_connect = pmsconnect.PmsConnect() @@ -94,196 +101,179 @@ class ActivityHandler(object): if not session['rating_key']: session['rating_key'] = self.get_rating_key() session['rating_key_websocket'] = self.get_rating_key() + self.session = session return session - return None + def update_db_session(self, notify=False): + if self.session is None: + 
self.get_live_session() - def update_db_session(self, session=None, notify=False): - if session is None: - session = self.get_live_session() - - if session: + if self.session: # Update our session temp table values - ap = activity_processor.ActivityProcessor() - ap.write_session(session=session, notify=notify) + self.ap.write_session(session=self.session, notify=notify) self.set_session_state() + self.get_db_session() def set_session_state(self): - ap = activity_processor.ActivityProcessor() - ap.set_session_state(session_key=self.get_session_key(), + self.ap.set_session_state(session_key=self.get_session_key(), state=self.timeline['state'], view_offset=self.timeline['viewOffset'], stopped=helpers.timestamp()) + + def put_notification(self, notify_action, **kwargs): + notification = {'stream_data': self.db_session.copy(), 'notify_action': notify_action} + notification.update(kwargs) + plexpy.NOTIFY_QUEUE.put(notification) def on_start(self): - if self.is_valid_session(): - session = self.get_live_session(skip_cache=True) + self.get_live_session(skip_cache=True) - if not session: + if not self.session: + return + + # Some DLNA clients create a new session temporarily when browsing the library + # Wait and get session again to make sure it is an actual session + if self.session['platform'] == 'DLNA': + time.sleep(1) + self.get_live_session() + if not self.session: return - # Some DLNA clients create a new session temporarily when browsing the library - # Wait and get session again to make sure it is an actual session - if session['platform'] == 'DLNA': - time.sleep(1) - session = self.get_live_session() - if not session: - return + logger.debug("Tautulli ActivityHandler :: Session %s started by user %s (%s) with ratingKey %s (%s)%s." 
+ % (str(self.session['session_key']), str(self.session['user_id']), self.session['username'], + str(self.session['rating_key']), self.session['full_title'], '[Live TV]' if self.session['live'] else '')) - logger.debug("Tautulli ActivityHandler :: Session %s started by user %s (%s) with ratingKey %s (%s)%s." - % (str(session['session_key']), str(session['user_id']), session['username'], - str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else '')) + # Write the new session to our temp session table + self.update_db_session(notify=True) - # Send notification after updating db - #plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'}) - - # Write the new session to our temp session table - self.update_db_session(session=session, notify=True) - - # Schedule a callback to force stop a stale stream 5 minutes later - schedule_callback('session_key-{}'.format(self.get_session_key()), - func=force_stop_stream, - args=[self.get_session_key(), session['full_title'], session['username']], - minutes=5) + # Schedule a callback to force stop a stale stream 5 minutes later + schedule_callback('session_key-{}'.format(self.get_session_key()), + func=force_stop_stream, + args=[self.get_session_key(), self.session['full_title'], self.session['username']], + minutes=5) + + self.check_markers() def on_stop(self, force_stop=False): - if self.is_valid_session(): - logger.debug("Tautulli ActivityHandler :: Session %s %sstopped." - % (str(self.get_session_key()), 'force ' if force_stop else '')) + logger.debug("Tautulli ActivityHandler :: Session %s %sstopped." 
+ % (str(self.get_session_key()), 'force ' if force_stop else '')) - # Set the session last_paused timestamp - ap = activity_processor.ActivityProcessor() - ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None) + # Set the session last_paused timestamp + self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None) - # Update the session state and viewOffset - # Set force_stop to true to disable the state set - if not force_stop: - self.set_session_state() + # Update the session state and viewOffset + # Set force_stop to true to disable the state set + if not force_stop: + self.set_session_state() - # Retrieve the session data from our temp table - db_session = ap.get_session_by_key(session_key=self.get_session_key()) + # Write it to the history table + row_id = self.ap.write_session_history(session=self.db_session) - # Write it to the history table - monitor_proc = activity_processor.ActivityProcessor() - row_id = monitor_proc.write_session_history(session=db_session) + if row_id: + self.put_notification('on_stop') - if row_id: - plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'}) + schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True) - schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True) - - # Remove the session from our temp session table - logger.debug("Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue" - % (str(self.get_session_key()), str(self.get_rating_key()))) - ap.delete_session(row_id=row_id) - delete_metadata_cache(self.get_session_key()) - else: - schedule_callback('session_key-{}'.format(self.get_session_key()), - func=force_stop_stream, - args=[self.get_session_key(), db_session['full_title'], db_session['user']], - seconds=30) + # Remove the session from our temp session table + logger.debug("Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue" 
+ % (str(self.get_session_key()), str(self.get_rating_key()))) + self.ap.delete_session(row_id=row_id) + delete_metadata_cache(self.get_session_key()) + else: + schedule_callback('session_key-{}'.format(self.get_session_key()), + func=force_stop_stream, + args=[self.get_session_key(), self.db_session['full_title'], self.db_session['user']], + seconds=30) def on_pause(self, still_paused=False): - if self.is_valid_session(): - if not still_paused: - logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key())) + if not still_paused: + logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key())) - # Set the session last_paused timestamp - ap = activity_processor.ActivityProcessor() - ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=helpers.timestamp()) + # Set the session last_paused timestamp + self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=helpers.timestamp()) - # Update the session state and viewOffset - self.update_db_session() + self.update_db_session() - # Retrieve the session data from our temp table - db_session = ap.get_session_by_key(session_key=self.get_session_key()) - - if not still_paused: - plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_pause'}) + if not still_paused: + self.put_notification('on_pause') def on_resume(self): - if self.is_valid_session(): - logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s resumed." 
% str(self.get_session_key())) - # Set the session last_paused timestamp - ap = activity_processor.ActivityProcessor() - ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None) + # Set the session last_paused timestamp + self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None) - # Update the session state and viewOffset - self.update_db_session() + self.update_db_session() - # Retrieve the session data from our temp table - db_session = ap.get_session_by_key(session_key=self.get_session_key()) - - plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_resume'}) - - def on_change(self): - if self.is_valid_session(): - logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key())) - - # Update the session state and viewOffset - self.update_db_session() - - # Retrieve the session data from our temp table - ap = activity_processor.ActivityProcessor() - db_session = ap.get_session_by_key(session_key=self.get_session_key()) - - plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_change'}) + self.put_notification('on_resume') def on_buffer(self): - if self.is_valid_session(): - logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key()) - ap = activity_processor.ActivityProcessor() - db_stream = ap.get_session_by_key(session_key=self.get_session_key()) + logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key()) - # Increment our buffer count - ap.increment_session_buffer_count(session_key=self.get_session_key()) + # Increment our buffer count + self.ap.increment_session_buffer_count(session_key=self.get_session_key()) - # Get our current buffer count - current_buffer_count = ap.get_session_buffer_count(self.get_session_key()) - logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." 
% - (self.get_session_key(), current_buffer_count)) + # Get our current buffer count + current_buffer_count = self.ap.get_session_buffer_count(self.get_session_key()) + logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." % + (self.get_session_key(), current_buffer_count)) - # Get our last triggered time - buffer_last_triggered = ap.get_session_buffer_trigger_time(self.get_session_key()) + # Get our last triggered time + buffer_last_triggered = self.ap.get_session_buffer_trigger_time(self.get_session_key()) - # Update the session state and viewOffset - self.update_db_session() + self.update_db_session() - time_since_last_trigger = 0 - if buffer_last_triggered: - logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." % - (self.get_session_key(), buffer_last_triggered)) - time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered) + time_since_last_trigger = 0 + if buffer_last_triggered: + logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." 
% + (self.get_session_key(), buffer_last_triggered)) + time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered) - if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \ - time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT: - ap.set_session_buffer_trigger_time(session_key=self.get_session_key()) + if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \ + time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT: + self.ap.set_session_buffer_trigger_time(session_key=self.get_session_key()) - # Retrieve the session data from our temp table - db_session = ap.get_session_by_key(session_key=self.get_session_key()) - - plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_buffer'}) + self.put_notification('on_buffer') def on_error(self): - if self.is_valid_session(): - logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.get_session_key())) - # Update the session state and viewOffset - self.update_db_session() + self.update_db_session() - # Retrieve the session data from our temp table - ap = activity_processor.ActivityProcessor() - db_session = ap.get_session_by_key(session_key=self.get_session_key()) + self.put_notification('on_error') - plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_error'}) + def on_change(self): + logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key())) + + self.update_db_session() + + self.put_notification('on_change') + + def on_intro(self): + if self.get_live_session(): + logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." 
% str(self.get_session_key())) + + self.put_notification('on_intro') + + def on_credits(self): + if self.get_live_session(): + logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.get_session_key())) + self.put_notification('on_credits') + + def on_watched(self): + logger.debug("Tautulli ActivityHandler :: Session %s watched." % str(self.get_session_key())) + + watched_notifiers = notification_handler.get_notify_state_enabled( + session=self.db_session, notify_action='on_watched', notified=False) + + for d in watched_notifiers: + self.put_notification('on_watched', notifier_id=d['notifier_id']) # This function receives events from our websocket connection def process(self): if self.is_valid_session(): - ap = activity_processor.ActivityProcessor() - db_session = ap.get_session_by_key(session_key=self.get_session_key()) + self.get_db_session() this_state = self.timeline['state'] this_rating_key = str(self.timeline['ratingKey']) @@ -294,27 +284,27 @@ class ActivityHandler(object): this_live_uuid = this_key.split('/')[-1] if this_key.startswith('/livetv/sessions') else None # If we already have this session in the temp table, check for state changes - if db_session: + if self.db_session: # Re-schedule the callback to reset the 5 minutes timer schedule_callback('session_key-{}'.format(self.get_session_key()), func=force_stop_stream, - args=[self.get_session_key(), db_session['full_title'], db_session['user']], + args=[self.get_session_key(), self.db_session['full_title'], self.db_session['user']], minutes=5) - last_state = db_session['state'] - last_rating_key = str(db_session['rating_key']) - last_live_uuid = db_session['live_uuid'] - last_transcode_key = db_session['transcode_key'].split('/')[-1] - last_paused = db_session['last_paused'] - last_rating_key_websocket = db_session['rating_key_websocket'] - last_guid = db_session['guid'] + last_state = self.db_session['state'] + last_rating_key = str(self.db_session['rating_key']) + 
last_live_uuid = self.db_session['live_uuid'] + last_transcode_key = self.db_session['transcode_key'].split('/')[-1] + last_paused = self.db_session['last_paused'] + last_rating_key_websocket = self.db_session['rating_key_websocket'] + last_guid = self.db_session['guid'] this_guid = last_guid # Check guid for live TV metadata every 60 seconds - if db_session['live'] and helpers.timestamp() - db_session['stopped'] > 60: - metadata = self.get_metadata(skip_cache=True) - if metadata: - this_guid = metadata['guid'] + if self.db_session['live'] and helpers.timestamp() - self.db_session['stopped'] > 60: + self.get_metadata(skip_cache=True) + if self.metadata: + this_guid = self.metadata['guid'] # Make sure the same item is being played if (this_rating_key == last_rating_key @@ -325,7 +315,7 @@ class ActivityHandler(object): if this_state == 'playing': # Update the session in our temp session table # if the last set temporary stopped time exceeds 60 seconds - if helpers.timestamp() - db_session['stopped'] > 60: + if helpers.timestamp() - self.db_session['stopped'] > 60: self.update_db_session() # Start our state checks @@ -356,33 +346,65 @@ class ActivityHandler(object): self.on_stop(force_stop=True) self.on_start() - # Monitor if the stream has reached the watch percentage for notifications - # The only purpose of this is for notifications - if not db_session['watched'] and this_state != 'buffering': - progress_percent = helpers.get_percent(self.timeline['viewOffset'], db_session['duration']) - watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT, - 'episode': plexpy.CONFIG.TV_WATCHED_PERCENT, - 'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT, - 'clip': plexpy.CONFIG.TV_WATCHED_PERCENT - } - - if progress_percent >= watched_percent.get(db_session['media_type'], 101): - logger.debug("Tautulli ActivityHandler :: Session %s watched." 
- % str(self.get_session_key())) - ap.set_watched(session_key=self.get_session_key()) - - watched_notifiers = notification_handler.get_notify_state_enabled( - session=db_session, notify_action='on_watched', notified=False) - - for d in watched_notifiers: - plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), - 'notifier_id': d['notifier_id'], - 'notify_action': 'on_watched'}) + # Check for stream offset notifications + self.check_markers() + self.check_watched() else: # We don't have this session in our table yet, start a new one. if this_state != 'buffering': self.on_start() + + def check_markers(self): + # Monitor if the stream has reached the intro or credit marker offsets + self.get_metadata() + + intro_markers, credits_markers = [], [] + for marker in self.metadata['markers']: + if marker['type'] == 'intro': + intro_markers.append(marker) + elif marker['type'] == 'credits': + credits_markers.append(marker) + + self._check_marker('intro', intro_markers) + self._check_marker('credits', credits_markers) + + def _check_marker(self, marker_type, markers): + if self.db_session[marker_type] < len(markers): + marker = markers[self.db_session[marker_type]] + + # Websocket events only fire every 10 seconds + # Check if the marker is within 10 seconds of the current viewOffset + if marker['start_time_offset'] - 10000 <= self.timeline['viewOffset'] <= marker['end_time_offset']: + set_func = getattr(self.ap, 'set_{}'.format(marker_type)) + callback_func = getattr(self, 'on_{}'.format(marker_type)) + + set_func(session_key=self.get_session_key()) + + if self.timeline['viewOffset'] < marker['start_time_offset']: + # Schedule a callback for the exact offset of the marker + schedule_callback( + 'session_key-{}-{}-{}'.format(self.get_session_key(), marker_type, self.db_session[marker_type]), + func=callback_func, + milliseconds=marker['start_time_offset'] - self.timeline['viewOffset'] + ) + else: + callback_func() + + def check_watched(self): + # Monitor if the stream has 
reached the watch percentage for notifications + if not self.db_session['watched'] and self.timeline['state'] != 'buffering': + progress_percent = helpers.get_percent(self.timeline['viewOffset'], self.db_session['duration']) + watched_percent = { + 'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT, + 'episode': plexpy.CONFIG.TV_WATCHED_PERCENT, + 'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT, + 'clip': plexpy.CONFIG.TV_WATCHED_PERCENT + } + + if progress_percent >= watched_percent.get(self.db_session['media_type'], 101): + self.ap.set_watched(session_key=self.get_session_key()) + self.on_watched() class TimelineHandler(object): diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 4608e4c8..e110ea64 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -660,8 +660,18 @@ class ActivityProcessor(object): self.db.action('UPDATE sessions SET write_attempts = ? WHERE session_key = ?', [session['write_attempts'] + 1, session_key]) + def set_intro(self, session_key=None): + self.db.action('UPDATE sessions SET intro = intro + 1 ' + 'WHERE session_key = ?', + [session_key]) + + def set_credits(self, session_key=None): + self.db.action('UPDATE sessions SET credits = credits + 1 ' + 'WHERE session_key = ?', + [session_key]) + def set_watched(self, session_key=None): - self.db.action('UPDATE sessions SET watched = ?' + self.db.action('UPDATE sessions SET watched = ? 
' 'WHERE session_key = ?', [1, session_key]) From 71bc0631559e6630610afd570afc2ec7551c12bc Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 16 Feb 2023 11:31:35 -0800 Subject: [PATCH 014/113] Add notification triggers for intro/credit markers --- plexpy/__init__.py | 29 ++++++++++++++++++++++++++++- plexpy/common.py | 8 ++++++++ plexpy/notification_handler.py | 24 ++++++++++++++++++++++++ plexpy/notifiers.py | 16 ++++++++++++++++ 4 files changed, 76 insertions(+), 1 deletion(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 003259bc..873b46f0 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -754,19 +754,22 @@ def dbcheck(): 'agent_id INTEGER, agent_name TEXT, agent_label TEXT, friendly_name TEXT, notifier_config TEXT, ' 'on_play INTEGER DEFAULT 0, on_stop INTEGER DEFAULT 0, on_pause INTEGER DEFAULT 0, ' 'on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, ' - 'on_error INTEGER DEFAULT 0, on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, ' + 'on_error INTEGER DEFAULT 0, on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, ' + 'on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, ' 'on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, ' 'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, ' 'on_concurrent INTEGER DEFAULT 0, on_newdevice INTEGER DEFAULT 0, on_plexpyupdate INTEGER DEFAULT 0, ' 'on_plexpydbcorrupt INTEGER DEFAULT 0, ' 'on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, ' 'on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_error_subject TEXT, ' + 'on_intro_subject TEXT, on_credits_subject TEXT, ' 'on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, ' 'on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, ' 'on_concurrent_subject TEXT, on_newdevice_subject TEXT, 
on_plexpyupdate_subject TEXT, ' 'on_plexpydbcorrupt_subject TEXT, ' 'on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, ' 'on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, ' + 'on_intro_body TEXT, on_credits_body TEXT, ' 'on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, ' 'on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, ' 'on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, ' @@ -2384,6 +2387,30 @@ def dbcheck(): 'ALTER TABLE notifiers ADD COLUMN on_error_body TEXT' ) + # Upgrade notifiers table from earlier versions + try: + c_db.execute('SELECT on_intro FROM notifiers') + except sqlite3.OperationalError: + logger.debug("Altering database. Updating database table notifiers.") + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_intro INTEGER DEFAULT 0' + ) + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_intro_subject TEXT' + ) + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_intro_body TEXT' + ) + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_credits INTEGER DEFAULT 0' + ) + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_credits_subject TEXT' + ) + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_credits_body TEXT' + ) + # Upgrade tvmaze_lookup table from earlier versions try: c_db.execute('SELECT rating_key FROM tvmaze_lookup') diff --git a/plexpy/common.py b/plexpy/common.py index ad91ee87..65de4810 100644 --- a/plexpy/common.py +++ b/plexpy/common.py @@ -416,6 +416,7 @@ NOTIFICATION_PARAMETERS = [ {'name': 'Progress Duration (sec)', 'type': 'int', 'value': 'progress_duration_sec', 'description': 'The last reported offset (in seconds) of the stream.'}, {'name': 'Progress Time', 'type': 'str', 'value': 'progress_time', 'description': 'The last reported offset (in time format) of the stream.'}, {'name': 'Progress Percent', 'type': 'int', 'value': 'progress_percent', 'description': 'The last reported progress 
percent of the stream.'}, + {'name': 'View Offset (ms)', 'type': 'int', 'value': 'view_offset', 'description': 'The current view offset (in milliseconds) for the stream.'}, {'name': 'Transcode Decision', 'type': 'str', 'value': 'transcode_decision', 'description': 'The transcode decision of the stream.'}, {'name': 'Container Decision', 'type': 'str', 'value': 'container_decision', 'description': 'The container transcode decision of the stream.'}, {'name': 'Video Decision', 'type': 'str', 'value': 'video_decision', 'description': 'The video transcode decision of the stream.'}, @@ -426,6 +427,12 @@ NOTIFICATION_PARAMETERS = [ {'name': 'Optimized Version Profile', 'type': 'str', 'value': 'optimized_version_profile', 'description': 'The optimized version profile of the stream.'}, {'name': 'Synced Version', 'type': 'int', 'value': 'synced_version', 'description': 'If the stream is an synced version.', 'example': '0 or 1'}, {'name': 'Live', 'type': 'int', 'value': 'live', 'description': 'If the stream is live TV.', 'example': '0 or 1'}, + {'name': 'Intro Marker Start Time', 'type': 'int', 'value': 'intro_marker_start', 'description': 'The intro marker start time offset in milliseconds.'}, + {'name': 'Intro Marker End Time', 'type': 'int', 'value': 'intro_marker_end', 'description': 'The intro marker end time offset in milliseconds.'}, + {'name': 'Credits Marker First', 'type': 'int', 'value': 'credits_marker_first', 'description': 'If the credits marker is the first marker.', 'example': '0 or 1'}, + {'name': 'Credits Marker Final', 'type': 'int', 'value': 'credits_marker_final', 'description': 'If the credits marker is the final marker.', 'example': '0 or 1'}, + {'name': 'Credits Marker Start Time', 'type': 'int', 'value': 'credits_marker_start', 'description': 'The credits marker start time offset in milliseconds.'}, + {'name': 'Credits Marker End Time', 'type': 'int', 'value': 'credits_marker_end', 'description': 'The credits marker end time offset in milliseconds.'}, 
{'name': 'Channel Call Sign', 'type': 'str', 'value': 'channel_call_sign', 'description': 'The Live TV channel call sign.'}, {'name': 'Channel Identifier', 'type': 'str', 'value': 'channel_identifier', 'description': 'The Live TV channel number.'}, {'name': 'Channel Thumb', 'type': 'str', 'value': 'channel_thumb', 'description': 'The URL for the Live TV channel logo.'}, @@ -540,6 +547,7 @@ NOTIFICATION_PARAMETERS = [ {'name': 'User Rating', 'type': 'float', 'value': 'user_rating', 'description': 'The user (star) rating (out of 10) for the item.'}, {'name': 'Duration', 'type': 'int', 'value': 'duration', 'description': 'The duration (in minutes) for the item.'}, {'name': 'Duration (sec)', 'type': 'int', 'value': 'duration_sec', 'description': 'The duration (in seconds) for the item.'}, + {'name': 'Duration (ms)', 'type': 'int', 'value': 'duration_ms', 'description': 'The duration (in milliseconds) for the item.'}, {'name': 'Poster URL', 'type': 'str', 'value': 'poster_url', 'description': 'A URL for the movie, TV show, or album poster.'}, {'name': 'Plex ID', 'type': 'str', 'value': 'plex_id', 'description': 'The Plex ID for the item.', 'example': 'e.g. 
5d7769a9594b2b001e6a6b7e'}, {'name': 'Plex URL', 'type': 'str', 'value': 'plex_url', 'description': 'The Plex URL to your server for the item.'}, diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 6b55c5b7..b9940b70 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -583,6 +583,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m notify_params.update(media_info) notify_params.update(media_part_info) + metadata = pmsconnect.PmsConnect().get_metadata_details(rating_key=rating_key) + child_metadata = grandchild_metadata = [] for key in kwargs.pop('child_keys', []): child = pmsconnect.PmsConnect().get_metadata_details(rating_key=key) @@ -938,6 +940,20 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m and audience_rating: audience_rating = helpers.get_percent(notify_params['audience_rating'], 10) + intro_markers, credits_markers = [], [] + for marker in metadata['markers']: + if marker['type'] == 'intro': + intro_markers.append(marker) + elif marker['type'] == 'credits': + credits_markers.append(marker) + + intro_marker = defaultdict(int) + credits_marker = defaultdict(int) + if notify_action == 'on_intro' and intro_markers and notify_params['intro'] < len(intro_markers): + intro_marker = intro_markers[notify_params['intro']] + if notify_action == 'on_credits' and credits_markers and notify_params['credits'] < len(credits_markers): + credits_marker = credits_markers[notify_params['credits']] + now = arrow.now() now_iso = now.isocalendar() @@ -1005,6 +1021,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m 'progress_duration_sec': view_offset_sec, 'progress_time': arrow.get(view_offset_sec).format(duration_format), 'progress_percent': helpers.get_percent(view_offset_sec, duration_sec), + 'view_offset': session.get('view_offset', 0), 'initial_stream': notify_params['initial_stream'], 'transcode_decision': 
transcode_decision, 'container_decision': notify_params['container_decision'], @@ -1016,6 +1033,12 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m 'optimized_version_profile': notify_params['optimized_version_profile'], 'synced_version': notify_params['synced_version'], 'live': notify_params['live'], + 'intro_marker_start': intro_marker['start_time_offset'], + 'intro_marker_end': intro_marker['end_time_offset'], + 'credits_marker_first': int(bool(credits_marker and notify_params['credits'] == 0)), + 'credits_marker_final': int(credits_marker['final']), + 'credits_marker_start': credits_marker['start_time_offset'], + 'credits_marker_end': credits_marker['end_time_offset'], 'channel_call_sign': notify_params['channel_call_sign'], 'channel_identifier': notify_params['channel_identifier'], 'channel_thumb': notify_params['channel_thumb'], @@ -1132,6 +1155,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m 'user_rating': notify_params['user_rating'], 'duration': duration, 'duration_sec': duration_sec, + 'duration_ms': notify_params['duration'], 'poster_title': notify_params['poster_title'], 'poster_url': notify_params['poster_url'], 'plex_id': notify_params['plex_id'], diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index e3ef6862..383f4908 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -340,6 +340,22 @@ def available_notification_actions(agent_id=None): 'icon': 'fa-exchange-alt', 'media_types': ('movie', 'episode', 'track') }, + {'label': 'Intro Marker', + 'name': 'on_intro', + 'description': 'Trigger a notification when a video stream reaches any intro marker.', + 'subject': 'Tautulli ({server_name})', + 'body': '{user} ({player}) has reached an intro marker for {title}.', + 'icon': 'fa-bookmark', + 'media_types': ('episode',) + }, + {'label': 'Credits Marker', + 'name': 'on_credits', + 'description': 'Trigger a notification when a video stream reaches any credits marker.', + 
'subject': 'Tautulli ({server_name})', + 'body': '{user} ({player}) has reached a credits marker for {title}.', + 'icon': 'fa-bookmark', + 'media_types': ('movie', 'episode') + }, {'label': 'Watched', 'name': 'on_watched', 'description': 'Trigger a notification when a video stream reaches the specified watch percentage.', From 97af214ac1bcff2de971a3c2bdeb9ff1b4a4edb1 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 16 Feb 2023 16:19:29 -0800 Subject: [PATCH 015/113] Handle seeking through intro/credits markers --- plexpy/__init__.py | 11 +++++++- plexpy/activity_handler.py | 54 +++++++++++++++++------------------- plexpy/activity_processor.py | 17 ++++++------ 3 files changed, 44 insertions(+), 38 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 873b46f0..09c16586 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -657,7 +657,7 @@ def dbcheck(): 'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, ' 'secure INTEGER, relayed INTEGER, ' 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, ' - 'watched INTEGER DEFAULT 0, intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, ' + 'watched INTEGER DEFAULT 0, intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, ' 'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, ' 'rating_key_websocket TEXT)' ) @@ -1417,6 +1417,15 @@ def dbcheck(): 'ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0' ) + # Upgrade sessions table from earlier versions + try: + c_db.execute('SELECT marker FROM sessions') + except sqlite3.OperationalError: + logger.debug(u"Altering database. 
Updating database table sessions.") + c_db.execute( + 'ALTER TABLE sessions ADD COLUMN marker INTEGER DEFAULT 0' + ) + # Upgrade session_history table from earlier versions try: c_db.execute('SELECT reference_id FROM session_history') diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py index a89ccb98..3b5a283f 100644 --- a/plexpy/activity_handler.py +++ b/plexpy/activity_handler.py @@ -250,16 +250,16 @@ class ActivityHandler(object): self.put_notification('on_change') - def on_intro(self): + def on_intro(self, marker): if self.get_live_session(): logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.get_session_key())) - self.put_notification('on_intro') + self.put_notification('on_intro', marker=marker) - def on_credits(self): + def on_credits(self, marker): if self.get_live_session(): logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.get_session_key())) - self.put_notification('on_credits') + self.put_notification('on_credits', marker=marker) def on_watched(self): logger.debug("Tautulli ActivityHandler :: Session %s watched." 
% str(self.get_session_key())) @@ -359,37 +359,33 @@ class ActivityHandler(object): # Monitor if the stream has reached the intro or credit marker offsets self.get_metadata() - intro_markers, credits_markers = [], [] - for marker in self.metadata['markers']: - if marker['type'] == 'intro': - intro_markers.append(marker) - elif marker['type'] == 'credits': - credits_markers.append(marker) - - self._check_marker('intro', intro_markers) - self._check_marker('credits', credits_markers) - - def _check_marker(self, marker_type, markers): - if self.db_session[marker_type] < len(markers): - marker = markers[self.db_session[marker_type]] + marker_flag = False + for marker_idx, marker in enumerate(self.metadata['markers'], start=1): # Websocket events only fire every 10 seconds # Check if the marker is within 10 seconds of the current viewOffset if marker['start_time_offset'] - 10000 <= self.timeline['viewOffset'] <= marker['end_time_offset']: - set_func = getattr(self.ap, 'set_{}'.format(marker_type)) - callback_func = getattr(self, 'on_{}'.format(marker_type)) + marker_flag = True - set_func(session_key=self.get_session_key()) + if self.db_session['marker'] != marker_idx: + self.ap.set_marker(session_key=self.get_session_key(), marker_idx=marker_idx, marker_type=marker['type']) + callback_func = getattr(self, 'on_{}'.format(marker['type'])) - if self.timeline['viewOffset'] < marker['start_time_offset']: - # Schedule a callback for the exact offset of the marker - schedule_callback( - 'session_key-{}-{}-{}'.format(self.get_session_key(), marker_type, self.db_session[marker_type]), - func=callback_func, - milliseconds=marker['start_time_offset'] - self.timeline['viewOffset'] - ) - else: - callback_func() + if self.timeline['viewOffset'] < marker['start_time_offset']: + # Schedule a callback for the exact offset of the marker + schedule_callback( + 'session_key-{}-marker-{}'.format(self.get_session_key(), marker_idx), + func=callback_func, + args=[marker], + 
milliseconds=marker['start_time_offset'] - self.timeline['viewOffset'] + ) + else: + callback_func(marker) + + break + + if not marker_flag: + self.ap.set_marker(session_key=self.get_session_key(), marker_idx=0) def check_watched(self): # Monitor if the stream has reached the watch percentage for notifications diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index e110ea64..c821d23d 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -660,15 +660,16 @@ class ActivityProcessor(object): self.db.action('UPDATE sessions SET write_attempts = ? WHERE session_key = ?', [session['write_attempts'] + 1, session_key]) - def set_intro(self, session_key=None): - self.db.action('UPDATE sessions SET intro = intro + 1 ' + def set_marker(self, session_key=None, marker_idx=None, marker_type=None): + if marker_type == 'intro': + args = [1, 0] + elif marker_type == 'credits': + args = [0, 1] + else: + args = [0, 0] + self.db.action('UPDATE sessions SET intro = ?, credits = ?, marker = ? ' 'WHERE session_key = ?', - [session_key]) - - def set_credits(self, session_key=None): - self.db.action('UPDATE sessions SET credits = credits + 1 ' - 'WHERE session_key = ?', - [session_key]) + args + [marker_idx, session_key]) def set_watched(self, session_key=None): self.db.action('UPDATE sessions SET watched = ? 
' From 9be3bbbf0f85eb33a7b6d09d17af52355cbcdadb Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 16 Feb 2023 16:20:25 -0800 Subject: [PATCH 016/113] Update marker notification parameters --- plexpy/common.py | 10 ++++------ plexpy/notification_handler.py | 24 +++++------------------- plexpy/pmsconnect.py | 10 +++++++++- 3 files changed, 18 insertions(+), 26 deletions(-) diff --git a/plexpy/common.py b/plexpy/common.py index 65de4810..cf1180dc 100644 --- a/plexpy/common.py +++ b/plexpy/common.py @@ -427,12 +427,10 @@ NOTIFICATION_PARAMETERS = [ {'name': 'Optimized Version Profile', 'type': 'str', 'value': 'optimized_version_profile', 'description': 'The optimized version profile of the stream.'}, {'name': 'Synced Version', 'type': 'int', 'value': 'synced_version', 'description': 'If the stream is an synced version.', 'example': '0 or 1'}, {'name': 'Live', 'type': 'int', 'value': 'live', 'description': 'If the stream is live TV.', 'example': '0 or 1'}, - {'name': 'Intro Marker Start Time', 'type': 'int', 'value': 'intro_marker_start', 'description': 'The intro marker start time offset in milliseconds.'}, - {'name': 'Intro Marker End Time', 'type': 'int', 'value': 'intro_marker_end', 'description': 'The intro marker end time offset in milliseconds.'}, - {'name': 'Credits Marker First', 'type': 'int', 'value': 'credits_marker_first', 'description': 'If the credits marker is the first marker.', 'example': '0 or 1'}, - {'name': 'Credits Marker Final', 'type': 'int', 'value': 'credits_marker_final', 'description': 'If the credits marker is the final marker.', 'example': '0 or 1'}, - {'name': 'Credits Marker Start Time', 'type': 'int', 'value': 'credits_marker_start', 'description': 'The credits marker start time offset in milliseconds.'}, - {'name': 'Credits Marker End Time', 'type': 'int', 'value': 'credits_marker_end', 'description': 'The credits marker end time offset in milliseconds.'}, + {'name': 'Marker Start 
Time', 'type': 'int', 'value': 'marker_start', 'description': 'The intro or credits marker start time offset in milliseconds.'}, + {'name': 'Marker End Time', 'type': 'int', 'value': 'marker_end', 'description': 'The intro or credits marker end time offset in milliseconds.'}, + {'name': 'Credits Marker First', 'type': 'int', 'value': 'credits_marker_first', 'description': 'If the marker is the first credits marker.', 'example': '0 or 1'}, + {'name': 'Credits Marker Final', 'type': 'int', 'value': 'credits_marker_final', 'description': 'If the marker is the final credits marker.', 'example': '0 or 1'}, {'name': 'Channel Call Sign', 'type': 'str', 'value': 'channel_call_sign', 'description': 'The Live TV channel call sign.'}, {'name': 'Channel Identifier', 'type': 'str', 'value': 'channel_identifier', 'description': 'The Live TV channel number.'}, {'name': 'Channel Thumb', 'type': 'str', 'value': 'channel_thumb', 'description': 'The URL for the Live TV channel logo.'}, diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index b9940b70..c92415e5 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -940,19 +940,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m and audience_rating: audience_rating = helpers.get_percent(notify_params['audience_rating'], 10) - intro_markers, credits_markers = [], [] - for marker in metadata['markers']: - if marker['type'] == 'intro': - intro_markers.append(marker) - elif marker['type'] == 'credits': - credits_markers.append(marker) - - intro_marker = defaultdict(int) - credits_marker = defaultdict(int) - if notify_action == 'on_intro' and intro_markers and notify_params['intro'] < len(intro_markers): - intro_marker = intro_markers[notify_params['intro']] - if notify_action == 'on_credits' and credits_markers and notify_params['credits'] < len(credits_markers): - credits_marker = credits_markers[notify_params['credits']] + marker = 
kwargs.pop('marker', defaultdict(int)) now = arrow.now() now_iso = now.isocalendar() @@ -1033,12 +1021,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m 'optimized_version_profile': notify_params['optimized_version_profile'], 'synced_version': notify_params['synced_version'], 'live': notify_params['live'], - 'intro_marker_start': intro_marker['start_time_offset'], - 'intro_marker_end': intro_marker['end_time_offset'], - 'credits_marker_first': int(bool(credits_marker and notify_params['credits'] == 0)), - 'credits_marker_final': int(credits_marker['final']), - 'credits_marker_start': credits_marker['start_time_offset'], - 'credits_marker_end': credits_marker['end_time_offset'], + 'marker_start': marker['start_time_offset'], + 'marker_end': marker['end_time_offset'], + 'credits_marker_first': int(marker['first']), + 'credits_marker_final': int(marker['final']), 'channel_call_sign': notify_params['channel_call_sign'], 'channel_identifier': notify_params['channel_identifier'], 'channel_thumb': notify_params['channel_thumb'], diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index 3a7a2450..cc433027 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -776,12 +776,20 @@ class PmsConnect(object): guids.append(helpers.get_xml_attr(guid, 'id')) if metadata_main.getElementsByTagName('Marker'): + first = None for marker in metadata_main.getElementsByTagName('Marker'): + marker_type = helpers.get_xml_attr(marker, 'type') + if marker_type == 'credits': + if first is None: + first = True + elif first is True: + first = False markers.append({ - 'id': helpers.get_xml_attr(marker, 'id'), + 'id': helpers.cast_to_int(helpers.get_xml_attr(marker, 'id')), 'type': helpers.get_xml_attr(marker, 'type'), 'start_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'startTimeOffset')), 'end_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'endTimeOffset')), + 'first': first if marker_type == 'credits' else 
False, 'final': helpers.bool_true(helpers.get_xml_attr(marker, 'final')) }) From 599c54c9e10ce7293319f172fab4def78702a86b Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 16 Feb 2023 17:02:30 -0800 Subject: [PATCH 017/113] Refactor activity handler --- plexpy/activity_handler.py | 482 ++++++++++++++++++------------------- 1 file changed, 229 insertions(+), 253 deletions(-) diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py index 3b5a283f..5ed69e10 100644 --- a/plexpy/activity_handler.py +++ b/plexpy/activity_handler.py @@ -53,38 +53,33 @@ class ActivityHandler(object): def __init__(self, timeline): self.ap = activity_processor.ActivityProcessor() self.timeline = timeline + + self.session_key = None + self.rating_key = None + + self.is_valid_session = ('sessionKey' in self.timeline and str(self.timeline['sessionKey']).isdigit()) + if self.is_valid_session: + self.session_key = int(self.timeline['sessionKey']) + self.rating_key = str(self.timeline['ratingKey']) + + self.key = self.timeline.get('key') + self.state = self.timeline.get('state') + self.view_offset = self.timeline.get('viewOffset') + self.transcode_key = self.timeline.get('transcodeSession', '') + self.db_session = None self.session = None self.metadata = None - def is_valid_session(self): - if 'sessionKey' in self.timeline: - if str(self.timeline['sessionKey']).isdigit(): - return True - - return False - - def get_session_key(self): - if self.is_valid_session(): - return int(self.timeline['sessionKey']) - - return None - - def get_rating_key(self): - if self.is_valid_session(): - return self.timeline['ratingKey'] - - return None - def get_db_session(self): # Retrieve the session data from our temp table - self.db_session = self.ap.get_session_by_key(session_key=self.get_session_key()) + self.db_session = self.ap.get_session_by_key(session_key=self.session_key) def get_metadata(self, skip_cache=False): if self.metadata is None: - 
cache_key = None if skip_cache else self.get_session_key() + cache_key = None if skip_cache else self.session_key pms_connect = pmsconnect.PmsConnect() - metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key) + metadata = pms_connect.get_metadata_details(rating_key=self.rating_key, cache_key=cache_key) if metadata: self.metadata = metadata @@ -95,12 +90,12 @@ class ActivityHandler(object): if session_list: for session in session_list['sessions']: - if int(session['session_key']) == self.get_session_key(): + if int(session['session_key']) == self.session_key: # Live sessions don't have rating keys in sessions # Get it from the websocket data if not session['rating_key']: - session['rating_key'] = self.get_rating_key() - session['rating_key_websocket'] = self.get_rating_key() + session['rating_key'] = self.rating_key + session['rating_key_websocket'] = self.rating_key self.session = session return session @@ -116,9 +111,9 @@ class ActivityHandler(object): self.get_db_session() def set_session_state(self): - self.ap.set_session_state(session_key=self.get_session_key(), - state=self.timeline['state'], - view_offset=self.timeline['viewOffset'], + self.ap.set_session_state(session_key=self.session_key, + state=self.state, + view_offset=self.view_offset, stopped=helpers.timestamp()) def put_notification(self, notify_action, **kwargs): @@ -148,19 +143,19 @@ class ActivityHandler(object): self.update_db_session(notify=True) # Schedule a callback to force stop a stale stream 5 minutes later - schedule_callback('session_key-{}'.format(self.get_session_key()), + schedule_callback('session_key-{}'.format(self.session_key), func=force_stop_stream, - args=[self.get_session_key(), self.session['full_title'], self.session['username']], + args=[self.session_key, self.session['full_title'], self.session['username']], minutes=5) self.check_markers() def on_stop(self, force_stop=False): logger.debug("Tautulli ActivityHandler :: Session %s 
%sstopped." - % (str(self.get_session_key()), 'force ' if force_stop else '')) + % (str(self.session_key), 'force ' if force_stop else '')) # Set the session last_paused timestamp - self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None) + self.ap.set_session_last_paused(session_key=self.session_key, timestamp=None) # Update the session state and viewOffset # Set force_stop to true to disable the state set @@ -173,25 +168,25 @@ class ActivityHandler(object): if row_id: self.put_notification('on_stop') - schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True) + schedule_callback('session_key-{}'.format(self.session_key), remove_job=True) # Remove the session from our temp session table logger.debug("Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue" - % (str(self.get_session_key()), str(self.get_rating_key()))) + % (str(self.session_key), str(self.rating_key))) self.ap.delete_session(row_id=row_id) - delete_metadata_cache(self.get_session_key()) + delete_metadata_cache(self.session_key) else: - schedule_callback('session_key-{}'.format(self.get_session_key()), + schedule_callback('session_key-{}'.format(self.session_key), func=force_stop_stream, - args=[self.get_session_key(), self.db_session['full_title'], self.db_session['user']], + args=[self.session_key, self.db_session['full_title'], self.db_session['user']], seconds=30) def on_pause(self, still_paused=False): if not still_paused: - logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s paused." 
% str(self.session_key)) # Set the session last_paused timestamp - self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=helpers.timestamp()) + self.ap.set_session_last_paused(session_key=self.session_key, timestamp=helpers.timestamp()) self.update_db_session() @@ -199,52 +194,52 @@ class ActivityHandler(object): self.put_notification('on_pause') def on_resume(self): - logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.session_key)) # Set the session last_paused timestamp - self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None) + self.ap.set_session_last_paused(session_key=self.session_key, timestamp=None) self.update_db_session() self.put_notification('on_resume') def on_buffer(self): - logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key()) + logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.session_key) # Increment our buffer count - self.ap.increment_session_buffer_count(session_key=self.get_session_key()) + self.ap.increment_session_buffer_count(session_key=self.session_key) # Get our current buffer count - current_buffer_count = self.ap.get_session_buffer_count(self.get_session_key()) + current_buffer_count = self.ap.get_session_buffer_count(self.session_key) logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." % - (self.get_session_key(), current_buffer_count)) + (self.session_key, current_buffer_count)) # Get our last triggered time - buffer_last_triggered = self.ap.get_session_buffer_trigger_time(self.get_session_key()) + buffer_last_triggered = self.ap.get_session_buffer_trigger_time(self.session_key) self.update_db_session() time_since_last_trigger = 0 if buffer_last_triggered: logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." 
% - (self.get_session_key(), buffer_last_triggered)) + (self.session_key, buffer_last_triggered)) time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered) if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \ time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT: - self.ap.set_session_buffer_trigger_time(session_key=self.get_session_key()) + self.ap.set_session_buffer_trigger_time(session_key=self.session_key) self.put_notification('on_buffer') def on_error(self): - logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.session_key)) self.update_db_session() self.put_notification('on_error') def on_change(self): - logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.session_key)) self.update_db_session() @@ -252,17 +247,17 @@ class ActivityHandler(object): def on_intro(self, marker): if self.get_live_session(): - logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.session_key)) self.put_notification('on_intro', marker=marker) def on_credits(self, marker): if self.get_live_session(): - logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.session_key)) self.put_notification('on_credits', marker=marker) def on_watched(self): - logger.debug("Tautulli ActivityHandler :: Session %s watched." % str(self.get_session_key())) + logger.debug("Tautulli ActivityHandler :: Session %s watched." 
% str(self.session_key)) watched_notifiers = notification_handler.get_notify_state_enabled( session=self.db_session, notify_action='on_watched', notified=False) @@ -272,88 +267,85 @@ class ActivityHandler(object): # This function receives events from our websocket connection def process(self): - if self.is_valid_session(): - self.get_db_session() + if not self.is_valid_session: + return + + self.get_db_session() - this_state = self.timeline['state'] - this_rating_key = str(self.timeline['ratingKey']) - this_key = self.timeline['key'] - this_transcode_key = self.timeline.get('transcodeSession', '') + if not self.db_session: + # We don't have this session in our table yet, start a new one. + if self.state != 'buffering': + self.on_start() + return - # Get the live tv session uuid - this_live_uuid = this_key.split('/')[-1] if this_key.startswith('/livetv/sessions') else None + # If we already have this session in the temp table, check for state changes + # Re-schedule the callback to reset the 5 minutes timer + schedule_callback('session_key-{}'.format(self.session_key), + func=force_stop_stream, + args=[self.session_key, self.db_session['full_title'], self.db_session['user']], + minutes=5) - # If we already have this session in the temp table, check for state changes - if self.db_session: - # Re-schedule the callback to reset the 5 minutes timer - schedule_callback('session_key-{}'.format(self.get_session_key()), - func=force_stop_stream, - args=[self.get_session_key(), self.db_session['full_title'], self.db_session['user']], - minutes=5) + last_state = self.db_session['state'] + last_rating_key = str(self.db_session['rating_key']) + last_live_uuid = self.db_session['live_uuid'] + last_transcode_key = self.db_session['transcode_key'].split('/')[-1] + last_paused = self.db_session['last_paused'] + last_rating_key_websocket = self.db_session['rating_key_websocket'] + last_guid = self.db_session['guid'] - last_state = self.db_session['state'] - last_rating_key = 
str(self.db_session['rating_key']) - last_live_uuid = self.db_session['live_uuid'] - last_transcode_key = self.db_session['transcode_key'].split('/')[-1] - last_paused = self.db_session['last_paused'] - last_rating_key_websocket = self.db_session['rating_key_websocket'] - last_guid = self.db_session['guid'] + # Get the live tv session uuid + this_live_uuid = self.key.split('/')[-1] if self.key.startswith('/livetv/sessions') else None - this_guid = last_guid - # Check guid for live TV metadata every 60 seconds - if self.db_session['live'] and helpers.timestamp() - self.db_session['stopped'] > 60: - self.get_metadata(skip_cache=True) - if self.metadata: - this_guid = self.metadata['guid'] + this_guid = last_guid + # Check guid for live TV metadata every 60 seconds + if self.db_session['live'] and helpers.timestamp() - self.db_session['stopped'] > 60: + self.get_metadata(skip_cache=True) + if self.metadata: + this_guid = self.metadata['guid'] - # Make sure the same item is being played - if (this_rating_key == last_rating_key - or this_rating_key == last_rating_key_websocket - or this_live_uuid == last_live_uuid) \ - and this_guid == last_guid: - # Update the session state and viewOffset - if this_state == 'playing': - # Update the session in our temp session table - # if the last set temporary stopped time exceeds 60 seconds - if helpers.timestamp() - self.db_session['stopped'] > 60: - self.update_db_session() + # Make sure the same item is being played + if (self.rating_key == last_rating_key + or self.rating_key == last_rating_key_websocket + or this_live_uuid == last_live_uuid) \ + and this_guid == last_guid: + # Update the session state and viewOffset + if self.state == 'playing': + # Update the session in our temp session table + # if the last set temporary stopped time exceeds 60 seconds + if helpers.timestamp() - self.db_session['stopped'] > 60: + self.update_db_session() - # Start our state checks - if this_state != last_state: - if this_state == 'paused': - 
self.on_pause() - elif last_paused and this_state == 'playing': - self.on_resume() - elif this_state == 'stopped': - self.on_stop() - elif this_state == 'error': - self.on_error() + # Start our state checks + if self.state != last_state: + if self.state == 'paused': + self.on_pause() + elif last_paused and self.state == 'playing': + self.on_resume() + elif self.state == 'stopped': + self.on_stop() + elif self.state == 'error': + self.on_error() - elif this_state == 'paused': - # Update the session last_paused timestamp - self.on_pause(still_paused=True) + elif self.state == 'paused': + # Update the session last_paused timestamp + self.on_pause(still_paused=True) - if this_state == 'buffering': - self.on_buffer() + if self.state == 'buffering': + self.on_buffer() - if this_transcode_key != last_transcode_key and this_state != 'stopped': - self.on_change() + if self.transcode_key != last_transcode_key and self.state != 'stopped': + self.on_change() - # If a client doesn't register stop events (I'm looking at you PHT!) check if the ratingKey has changed - else: - # Manually stop and start - # Set force_stop so that we don't overwrite our last viewOffset - self.on_stop(force_stop=True) - self.on_start() + # If a client doesn't register stop events (I'm looking at you PHT!) check if the ratingKey has changed + else: + # Manually stop and start + # Set force_stop so that we don't overwrite our last viewOffset + self.on_stop(force_stop=True) + self.on_start() - # Check for stream offset notifications - self.check_markers() - self.check_watched() - - else: - # We don't have this session in our table yet, start a new one. 
- if this_state != 'buffering': - self.on_start() + # Check for stream offset notifications + self.check_markers() + self.check_watched() def check_markers(self): # Monitor if the stream has reached the intro or credit marker offsets @@ -364,20 +356,20 @@ class ActivityHandler(object): for marker_idx, marker in enumerate(self.metadata['markers'], start=1): # Websocket events only fire every 10 seconds # Check if the marker is within 10 seconds of the current viewOffset - if marker['start_time_offset'] - 10000 <= self.timeline['viewOffset'] <= marker['end_time_offset']: + if marker['start_time_offset'] - 10000 <= self.view_offset <= marker['end_time_offset']: marker_flag = True if self.db_session['marker'] != marker_idx: - self.ap.set_marker(session_key=self.get_session_key(), marker_idx=marker_idx, marker_type=marker['type']) + self.ap.set_marker(session_key=self.session_key, marker_idx=marker_idx, marker_type=marker['type']) callback_func = getattr(self, 'on_{}'.format(marker['type'])) - if self.timeline['viewOffset'] < marker['start_time_offset']: + if self.view_offset < marker['start_time_offset']: # Schedule a callback for the exact offset of the marker schedule_callback( - 'session_key-{}-marker-{}'.format(self.get_session_key(), marker_idx), + 'session_key-{}-marker-{}'.format(self.session_key, marker_idx), func=callback_func, args=[marker], - milliseconds=marker['start_time_offset'] - self.timeline['viewOffset'] + milliseconds=marker['start_time_offset'] - self.view_offset ) else: callback_func(marker) @@ -385,7 +377,7 @@ class ActivityHandler(object): break if not marker_flag: - self.ap.set_marker(session_key=self.get_session_key(), marker_idx=0) + self.ap.set_marker(session_key=self.session_key, marker_idx=0) def check_watched(self): # Monitor if the stream has reached the watch percentage for notifications @@ -399,7 +391,7 @@ class ActivityHandler(object): } if progress_percent >= watched_percent.get(self.db_session['media_type'], 101): - 
self.ap.set_watched(session_key=self.get_session_key()) + self.ap.set_watched(session_key=self.session_key) self.on_watched() @@ -408,121 +400,106 @@ class TimelineHandler(object): def __init__(self, timeline): self.timeline = timeline - def is_item(self): - if 'itemID' in self.timeline: - return True + self.rating_key = None - return False + self.is_item = ('itemID' in self.timeline) + if self.is_item: + self.rating_key = int(self.timeline['itemID']) - def get_rating_key(self): - if self.is_item(): - return int(self.timeline['itemID']) - - return None - - def get_metadata(self): - pms_connect = pmsconnect.PmsConnect() - metadata = pms_connect.get_metadata_details(self.get_rating_key()) - - if metadata: - return metadata - - return None + self.parent_rating_key = helpers.cast_to_int(self.timeline.get('parentItemID')) or None + self.grandparent_rating_key = helpers.cast_to_int(self.timeline.get('rootItemID')) or None + self.identifier = self.timeline.get('identifier') + self.state_type = self.timeline.get('state') + self.media_type = common.MEDIA_TYPE_VALUES.get(self.timeline.get('type')) + self.section_id = helpers.cast_to_int(self.timeline.get('sectionID', 0)) + self.title = self.timeline.get('title', 'Unknown') + self.metadata_state = self.timeline.get('metadataState') + self.media_state = self.timeline.get('mediaState') + self.queue_size = self.timeline.get('queueSize') # This function receives events from our websocket connection def process(self): - if self.is_item(): - global RECENTLY_ADDED_QUEUE + if not self.is_item: + return + + # Return if it is not a library event (i.e. 
DVR EPG event) + if self.identifier != 'com.plexapp.plugins.library': + return - rating_key = self.get_rating_key() - parent_rating_key = helpers.cast_to_int(self.timeline.get('parentItemID')) or None - grandparent_rating_key = helpers.cast_to_int(self.timeline.get('rootItemID')) or None + global RECENTLY_ADDED_QUEUE - identifier = self.timeline.get('identifier') - state_type = self.timeline.get('state') - media_type = common.MEDIA_TYPE_VALUES.get(self.timeline.get('type')) - section_id = helpers.cast_to_int(self.timeline.get('sectionID', 0)) - title = self.timeline.get('title', 'Unknown') - metadata_state = self.timeline.get('metadataState') - media_state = self.timeline.get('mediaState') - queue_size = self.timeline.get('queueSize') + # Add a new media item to the recently added queue + if self.media_type and self.section_id > 0 and self.state_type == 0 and self.metadata_state == 'created': - # Return if it is not a library event (i.e. DVR EPG event) - if identifier != 'com.plexapp.plugins.library': - return + if self.media_type in ('episode', 'track'): + grandparent_set = RECENTLY_ADDED_QUEUE.get(self.grandparent_rating_key, set()) + grandparent_set.add(self.parent_rating_key) + RECENTLY_ADDED_QUEUE[self.grandparent_rating_key] = grandparent_set - # Add a new media item to the recently added queue - if media_type and section_id > 0 and state_type == 0 and metadata_state == 'created': + parent_set = RECENTLY_ADDED_QUEUE.get(self.parent_rating_key, set()) + parent_set.add(self.rating_key) + RECENTLY_ADDED_QUEUE[self.parent_rating_key] = parent_set - if media_type in ('episode', 'track'): - grandparent_set = RECENTLY_ADDED_QUEUE.get(grandparent_rating_key, set()) - grandparent_set.add(parent_rating_key) - RECENTLY_ADDED_QUEUE[grandparent_rating_key] = grandparent_set + RECENTLY_ADDED_QUEUE[self.rating_key] = {self.grandparent_rating_key} - parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set()) - parent_set.add(rating_key) - 
RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set + logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) " + "added to recently added queue." + % (self.title, str(self.rating_key), str(self.grandparent_rating_key))) - RECENTLY_ADDED_QUEUE[rating_key] = {grandparent_rating_key} + # Schedule a callback to clear the recently added queue + schedule_callback('rating_key-{}'.format(self.grandparent_rating_key), + func=clear_recently_added_queue, + args=[self.grandparent_rating_key, self.title], + seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY) - logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) " - "added to recently added queue." - % (title, str(rating_key), str(grandparent_rating_key))) + elif self.media_type in ('season', 'album'): + parent_set = RECENTLY_ADDED_QUEUE.get(self.parent_rating_key, set()) + parent_set.add(self.rating_key) + RECENTLY_ADDED_QUEUE[self.parent_rating_key] = parent_set - # Schedule a callback to clear the recently added queue - schedule_callback('rating_key-{}'.format(grandparent_rating_key), - func=clear_recently_added_queue, - args=[grandparent_rating_key, title], - seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY) + logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) " + "added to recently added queue." + % (self.title, str(self.rating_key), str(self.parent_rating_key))) - elif media_type in ('season', 'album'): - parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set()) - parent_set.add(rating_key) - RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set + # Schedule a callback to clear the recently added queue + schedule_callback('rating_key-{}'.format(self.parent_rating_key), + func=clear_recently_added_queue, + args=[self.parent_rating_key, self.title], + seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY) - logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) " - "added to recently added queue." 
- % (title, str(rating_key), str(parent_rating_key))) - - # Schedule a callback to clear the recently added queue - schedule_callback('rating_key-{}'.format(parent_rating_key), - func=clear_recently_added_queue, - args=[parent_rating_key, title], - seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY) - - elif media_type in ('movie', 'show', 'artist'): - queue_set = RECENTLY_ADDED_QUEUE.get(rating_key, set()) - RECENTLY_ADDED_QUEUE[rating_key] = queue_set - - logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) " - "added to recently added queue." - % (title, str(rating_key))) - - # Schedule a callback to clear the recently added queue - schedule_callback('rating_key-{}'.format(rating_key), - func=clear_recently_added_queue, - args=[rating_key, title], - seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY) - - # A movie, show, or artist is done processing - elif media_type in ('movie', 'show', 'artist') and section_id > 0 and \ - state_type == 5 and metadata_state is None and queue_size is None and \ - rating_key in RECENTLY_ADDED_QUEUE: + elif self.media_type in ('movie', 'show', 'artist'): + queue_set = RECENTLY_ADDED_QUEUE.get(self.rating_key, set()) + RECENTLY_ADDED_QUEUE[self.rating_key] = queue_set logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) " - "done processing metadata." - % (title, str(rating_key))) + "added to recently added queue." + % (self.title, str(self.rating_key))) - # An item was deleted, make sure it is removed from the queue - elif state_type == 9 and metadata_state == 'deleted': - if rating_key in RECENTLY_ADDED_QUEUE and not RECENTLY_ADDED_QUEUE[rating_key]: - logger.debug("Tautulli TimelineHandler :: Library item %s " - "removed from recently added queue." 
- % str(rating_key)) - del_keys(rating_key) + # Schedule a callback to clear the recently added queue + schedule_callback('rating_key-{}'.format(self.rating_key), + func=clear_recently_added_queue, + args=[self.rating_key, self.title], + seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY) - # Remove the callback if the item is removed - schedule_callback('rating_key-{}'.format(rating_key), remove_job=True) + # A movie, show, or artist is done processing + elif self.media_type in ('movie', 'show', 'artist') and self.section_id > 0 and \ + self.state_type == 5 and self.metadata_state is None and self.queue_size is None and \ + self.rating_key in RECENTLY_ADDED_QUEUE: + + logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) " + "done processing metadata." + % (self.title, str(self.rating_key))) + + # An item was deleted, make sure it is removed from the queue + elif self.state_type == 9 and self.metadata_state == 'deleted': + if self.rating_key in RECENTLY_ADDED_QUEUE and not RECENTLY_ADDED_QUEUE[self.rating_key]: + logger.debug("Tautulli TimelineHandler :: Library item %s " + "removed from recently added queue." 
+ % str(self.rating_key)) + del_keys(self.rating_key) + + # Remove the callback if the item is removed + schedule_callback('rating_key-{}'.format(self.rating_key), remove_job=True) class ReachabilityHandler(object): @@ -530,10 +507,7 @@ class ReachabilityHandler(object): def __init__(self, data): self.data = data - def is_reachable(self): - if 'reachability' in self.data: - return self.data['reachability'] - return False + self.is_reachable = self.data.get('reachable', False) def remote_access_enabled(self): pms_connect = pmsconnect.PmsConnect() @@ -552,42 +526,44 @@ class ReachabilityHandler(object): return # Do nothing if remote access is still up and hasn't changed - if self.is_reachable() and plexpy.PLEX_REMOTE_ACCESS_UP: + if self.is_reachable and plexpy.PLEX_REMOTE_ACCESS_UP: return pms_connect = pmsconnect.PmsConnect() server_response = pms_connect.get_server_response() - if server_response: - # Waiting for port mapping - if server_response['mapping_state'] == 'waiting': - logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.") + if not server_response: + return - elif plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']: - logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s" % server_response['reason']) - logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.") + # Waiting for port mapping + if server_response['mapping_state'] == 'waiting': + logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.") - plexpy.PLEX_REMOTE_ACCESS_UP = False + elif plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']: + logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s" % server_response['reason']) + logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.") - if not ACTIVITY_SCHED.get_job('on_extdown'): - logger.debug("Tautulli ReachabilityHandler :: Scheduling remote access down callback in %d seconds.", - 
plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD) - schedule_callback('on_extdown', func=self.on_extdown, args=[server_response], - seconds=plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD) + plexpy.PLEX_REMOTE_ACCESS_UP = False - elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']: - logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.") + if not ACTIVITY_SCHED.get_job('on_extdown'): + logger.debug("Tautulli ReachabilityHandler :: Scheduling remote access down callback in %d seconds.", + plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD) + schedule_callback('on_extdown', func=self.on_extdown, args=[server_response], + seconds=plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD) - plexpy.PLEX_REMOTE_ACCESS_UP = True + elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']: + logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.") - if ACTIVITY_SCHED.get_job('on_extdown'): - logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.") - schedule_callback('on_extdown', remove_job=True) - else: - self.on_extup(server_response) + plexpy.PLEX_REMOTE_ACCESS_UP = True - elif plexpy.PLEX_REMOTE_ACCESS_UP is None: - plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable() + if ACTIVITY_SCHED.get_job('on_extdown'): + logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.") + schedule_callback('on_extdown', remove_job=True) + else: + self.on_extup(server_response) + + elif plexpy.PLEX_REMOTE_ACCESS_UP is None: + plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable def del_keys(key): From 7cc78d448d6d3f907d203203be0d20d2a359f275 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Thu, 16 Feb 2023 17:06:46 -0800 Subject: [PATCH 018/113] Simplify set marker in database --- plexpy/activity_processor.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git 
a/plexpy/activity_processor.py b/plexpy/activity_processor.py index c821d23d..dcd1f138 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -661,15 +661,10 @@ class ActivityProcessor(object): [session['write_attempts'] + 1, session_key]) def set_marker(self, session_key=None, marker_idx=None, marker_type=None): - if marker_type == 'intro': - args = [1, 0] - elif marker_type == 'credits': - args = [0, 1] - else: - args = [0, 0] + marker_args = [int(marker_type == 'intro'), int(marker_type == 'credits')] self.db.action('UPDATE sessions SET intro = ?, credits = ?, marker = ? ' 'WHERE session_key = ?', - args + [marker_idx, session_key]) + marker_args + [marker_idx, session_key]) def set_watched(self, session_key=None): self.db.action('UPDATE sessions SET watched = ? ' From 9c6b8f1af5bd4f1e0c32a85d178253ae14014fd9 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 17 Feb 2023 09:28:44 -0800 Subject: [PATCH 019/113] Simplify metadata credits marker first flag --- plexpy/pmsconnect.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index cc433027..347de513 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -780,17 +780,15 @@ class PmsConnect(object): for marker in metadata_main.getElementsByTagName('Marker'): marker_type = helpers.get_xml_attr(marker, 'type') if marker_type == 'credits': - if first is None: - first = True - elif first is True: - first = False + first = bool(first is None) + final = helpers.bool_true(helpers.get_xml_attr(marker, 'final')) markers.append({ 'id': helpers.cast_to_int(helpers.get_xml_attr(marker, 'id')), - 'type': helpers.get_xml_attr(marker, 'type'), + 'type': marker_type, 'start_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'startTimeOffset')), 'end_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'endTimeOffset')), - 'first': first if marker_type 
== 'credits' else False, - 'final': helpers.bool_true(helpers.get_xml_attr(marker, 'final')) + 'first': first if marker_type == 'credits' else None, + 'final': final if marker_type == 'credits' else None }) if metadata_type == 'movie': From 32bb98e8c1a0d3b790e269e28c15d259628e00d1 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 17 Feb 2023 09:29:08 -0800 Subject: [PATCH 020/113] Update get_metadata_details docs --- plexpy/webserve.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 8ad3e664..963f2ce1 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -5339,6 +5339,24 @@ class WebInterface(object): "last_viewed_at": "1462165717", "library_name": "TV Shows", "live": 0, + "markers": [ + { + "id": 908, + "type": "credits", + "start_time_offset": 2923863, + "end_time_offset": 2998197, + "first": true, + "final": true + }, + { + "id": 908, + "type": "intro", + "start_time_offset": 1622, + "end_time_offset": 109135, + "first": null, + "final": null + } + ], "media_index": "1", "media_info": [ { From 6b0b3a476fbfde4cdf3dbfa617ff8080cdf28234 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 17 Feb 2023 10:01:57 -0800 Subject: [PATCH 021/113] Add support for commercial marker triggers --- plexpy/__init__.py | 13 +++++++++++-- plexpy/activity_handler.py | 11 +++++++++-- plexpy/activity_processor.py | 8 ++++++-- plexpy/notification_handler.py | 4 ++-- plexpy/notifiers.py | 8 ++++++++ 5 files changed, 36 insertions(+), 8 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 09c16586..d3de0815 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -656,8 +656,8 @@ def dbcheck(): 'synced_version INTEGER, synced_version_profile TEXT, ' 'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, ' 'secure INTEGER, relayed INTEGER, ' - 
'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, ' - 'watched INTEGER DEFAULT 0, intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, ' + 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, ' + 'intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, commercial INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, ' 'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, ' 'rating_key_websocket TEXT)' ) @@ -1417,6 +1417,15 @@ def dbcheck(): 'ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0' ) + # Upgrade sessions table from earlier versions + try: + c_db.execute('SELECT commercial FROM sessions') + except sqlite3.OperationalError: + logger.debug(u"Altering database. Updating database table sessions.") + c_db.execute( + 'ALTER TABLE sessions ADD COLUMN commercial INTEGER DEFAULT 0' + ) + # Upgrade sessions table from earlier versions try: c_db.execute('SELECT marker FROM sessions') diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py index 5ed69e10..8933c1ac 100644 --- a/plexpy/activity_handler.py +++ b/plexpy/activity_handler.py @@ -247,13 +247,20 @@ class ActivityHandler(object): def on_intro(self, marker): if self.get_live_session(): - logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.session_key)) + logger.debug("Tautulli ActivityHandler :: Session %s reached intro marker." % str(self.session_key)) self.put_notification('on_intro', marker=marker) + def on_commercial(self, marker): + if self.get_live_session(): + logger.debug("Tautulli ActivityHandler :: Session %s reached commercial marker." % str(self.session_key)) + + self.put_notification('on_commercial', marker=marker) + def on_credits(self, marker): if self.get_live_session(): - logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." 
% str(self.session_key)) + logger.debug("Tautulli ActivityHandler :: Session %s reached credits marker." % str(self.session_key)) + self.put_notification('on_credits', marker=marker) def on_watched(self): diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index dcd1f138..a8d8cdd4 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -661,8 +661,12 @@ class ActivityProcessor(object): [session['write_attempts'] + 1, session_key]) def set_marker(self, session_key=None, marker_idx=None, marker_type=None): - marker_args = [int(marker_type == 'intro'), int(marker_type == 'credits')] - self.db.action('UPDATE sessions SET intro = ?, credits = ?, marker = ? ' + marker_args = [ + int(marker_type == 'intro'), + int(marker_type == 'commercial'), + int(marker_type == 'credits') + ] + self.db.action('UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? ' 'WHERE session_key = ?', marker_args + [marker_idx, session_key]) diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index c92415e5..4b727c5d 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -1023,8 +1023,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m 'live': notify_params['live'], 'marker_start': marker['start_time_offset'], 'marker_end': marker['end_time_offset'], - 'credits_marker_first': int(marker['first']), - 'credits_marker_final': int(marker['final']), + 'credits_marker_first': helpers.cast_to_int(marker['first']), + 'credits_marker_final': helpers.cast_to_int(marker['final']), 'channel_call_sign': notify_params['channel_call_sign'], 'channel_identifier': notify_params['channel_identifier'], 'channel_thumb': notify_params['channel_thumb'], diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index 383f4908..d717c8f6 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -348,6 +348,14 @@ def available_notification_actions(agent_id=None): 'icon': 
'fa-bookmark', 'media_types': ('episode',) }, + {'label': 'Commercial Marker', + 'name': 'on_credits', + 'description': 'Trigger a notification when a video stream reaches any commercial marker.', + 'subject': 'Tautulli ({server_name})', + 'body': '{user} ({player}) has reached a commercial marker for {title}.', + 'icon': 'fa-bookmark', + 'media_types': ('movie', 'episode') + }, {'label': 'Credits Marker', 'name': 'on_credits', 'description': 'Trigger a notification when a video stream reaches any credits marker.', From 87d3c0ae8172ca65f3168d999bdbdb920265be14 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 17 Feb 2023 18:54:52 -0800 Subject: [PATCH 022/113] Fix missing on_commercial columns in database --- plexpy/__init__.py | 22 +++++++++++++++++++--- plexpy/notifiers.py | 2 +- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index d3de0815..2827c5fa 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -754,7 +754,8 @@ def dbcheck(): 'agent_id INTEGER, agent_name TEXT, agent_label TEXT, friendly_name TEXT, notifier_config TEXT, ' 'on_play INTEGER DEFAULT 0, on_stop INTEGER DEFAULT 0, on_pause INTEGER DEFAULT 0, ' 'on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, ' - 'on_error INTEGER DEFAULT 0, on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, ' + 'on_error INTEGER DEFAULT 0, ' + 'on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, on_commercial INTEGER DEFAULT 0, ' 'on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, ' 'on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, ' 'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, ' @@ -762,14 +763,14 @@ def dbcheck(): 'on_plexpydbcorrupt INTEGER DEFAULT 0, ' 'on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, ' 'on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, 
on_error_subject TEXT, ' - 'on_intro_subject TEXT, on_credits_subject TEXT, ' + 'on_intro_subject TEXT, on_credits_subject TEXT, on_commercial_subject TEXT,' 'on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, ' 'on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, ' 'on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, ' 'on_plexpydbcorrupt_subject TEXT, ' 'on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, ' 'on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, ' - 'on_intro_body TEXT, on_credits_body TEXT, ' + 'on_intro_body TEXT, on_credits_body TEXT, on_commercial_body TEXT, ' 'on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, ' 'on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, ' 'on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, ' @@ -2429,6 +2430,21 @@ def dbcheck(): 'ALTER TABLE notifiers ADD COLUMN on_credits_body TEXT' ) + # Upgrade notifiers table from earlier versions + try: + c_db.execute('SELECT on_commercial FROM notifiers') + except sqlite3.OperationalError: + logger.debug("Altering database. 
Updating database table notifiers.") + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_commercial INTEGER DEFAULT 0' + ) + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_commercial_subject TEXT' + ) + c_db.execute( + 'ALTER TABLE notifiers ADD COLUMN on_commercial_body TEXT' + ) + # Upgrade tvmaze_lookup table from earlier versions try: c_db.execute('SELECT rating_key FROM tvmaze_lookup') diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index d717c8f6..f0f02bf0 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -349,7 +349,7 @@ def available_notification_actions(agent_id=None): 'media_types': ('episode',) }, {'label': 'Commercial Marker', - 'name': 'on_credits', + 'name': 'on_commercial', 'description': 'Trigger a notification when a video stream reaches any commercial marker.', 'subject': 'Tautulli ({server_name})', 'body': '{user} ({player}) has reached a commercial marker for {title}.', From 6807cebe51311f599a32f5df421c2dccc6e0c498 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 17 Feb 2023 18:55:28 -0800 Subject: [PATCH 023/113] Strip whitespace from condition values --- plexpy/notification_handler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 4b727c5d..197314fc 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -294,7 +294,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None): # Cast the condition values to the correct type try: if parameter_type == 'str': - values = ['' if v == '~' else str(v).lower() for v in values] + values = ['' if v == '~' else str(v).strip().lower() for v in values] elif parameter_type == 'int': values = [helpers.cast_to_int(v) for v in values] @@ -313,7 +313,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None): # Cast the parameter value to the correct type try: if parameter_type == 'str': - 
parameter_value = str(parameter_value).lower() + parameter_value = str(parameter_value).strip().lower() elif parameter_type == 'int': parameter_value = helpers.cast_to_int(parameter_value) From c2abfce8e1bcc3a1f57a76a98e642eaa263edf6f Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 19 Feb 2023 17:41:48 -0800 Subject: [PATCH 024/113] Save credits markers offsets to session history --- plexpy/__init__.py | 15 ++++++++++++++- plexpy/activity_processor.py | 12 +++++++++++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 2827c5fa..d0aed7cf 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -715,7 +715,8 @@ def dbcheck(): 'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, ' 'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, ' 'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, ' - 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)' + 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, ' + 'marker_credits_first INTEGER DEFAULT NULL, marker_credits_final INTEGER DEFAULT NULL)' ) # users table :: This table keeps record of the friends list @@ -1564,6 +1565,18 @@ def dbcheck(): 'ALTER TABLE session_history_metadata ADD COLUMN channel_thumb TEXT' ) + # Upgrade session_history_metadata table from earlier versions + try: + c_db.execute('SELECT marker_credits_first FROM session_history_metadata') + except sqlite3.OperationalError: + logger.debug("Altering database. 
Updating database table session_history_metadata.") + c_db.execute( + 'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_first INTEGER DEFAULT NULL' + ) + c_db.execute( + 'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_final INTEGER DEFAULT NULL' + ) + # Upgrade session_history_media_info table from earlier versions try: c_db.execute('SELECT transcode_decision FROM session_history_media_info') diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index a8d8cdd4..d55c6738 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -490,6 +490,14 @@ class ActivityProcessor(object): genres = ";".join(metadata['genres']) labels = ";".join(metadata['labels']) + marker_credits_first = None + marker_credits_final = None + for marker in metadata['markers']: + if marker['first']: + marker_credits_first = marker['start_time_offset'] + if marker['final']: + marker_credits_final = marker['start_time_offset'] + # logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..." # % session['session_key']) keys = {'id': last_id} @@ -528,7 +536,9 @@ class ActivityProcessor(object): 'live': session['live'], 'channel_call_sign': media_info.get('channel_call_sign', ''), 'channel_identifier': media_info.get('channel_identifier', ''), - 'channel_thumb': media_info.get('channel_thumb', '') + 'channel_thumb': media_info.get('channel_thumb', ''), + 'marker_credits_first': marker_credits_first, + 'marker_credits_final': marker_credits_final } # logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..." 
From b1dd28e39b648482cb55a0c6b8a827760b4aea90 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 20 Feb 2023 16:33:19 -0800 Subject: [PATCH 025/113] Add setting to change video watched completion behaviour --- data/interfaces/default/settings.html | 14 ++++++++++++++ plexpy/config.py | 4 +++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/data/interfaces/default/settings.html b/data/interfaces/default/settings.html index f7c24211..fd234da2 100644 --- a/data/interfaces/default/settings.html +++ b/data/interfaces/default/settings.html @@ -213,6 +213,20 @@

Set the percentage for a music track to be considered as listened. Minimum 50, Maximum 95.

+
+ +
+
+ +
+
+

Decide whether to use end credits markers to determine the 'watched' state of video items. When markers are not available the selected threshold percentage will be used.

+

diff --git a/plexpy/config.py b/plexpy/config.py index 544cf79a..f094e88e 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -199,6 +199,7 @@ _CONFIG_DEFINITIONS = { 'UPGRADE_FLAG': (int, 'Advanced', 0), 'VERBOSE_LOGS': (int, 'Advanced', 1), 'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1), + 'WATCHED_MARKER': (int, 'Monitoring', 3), 'WEBSOCKET_MONITOR_PING_PONG': (int, 'Advanced', 0), 'WEBSOCKET_CONNECTION_ATTEMPTS': (int, 'Advanced', 5), 'WEBSOCKET_CONNECTION_TIMEOUT': (int, 'Advanced', 5), @@ -298,7 +299,8 @@ SETTINGS = [ 'REFRESH_USERS_INTERVAL', 'SHOW_ADVANCED_SETTINGS', 'TIME_FORMAT', - 'TV_WATCHED_PERCENT' + 'TV_WATCHED_PERCENT', + 'WATCHED_MARKER' ] CHECKED_SETTINGS = [ From b2b12044e3342ccc77dbbaa9c90771e8e13a38dd Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 20 Feb 2023 17:14:35 -0800 Subject: [PATCH 026/113] Trigger on_watched based on credits markers --- plexpy/activity_handler.py | 92 +++++++++++++++++++++++++------------- plexpy/helpers.py | 40 +++++++++++++++++ 2 files changed, 101 insertions(+), 31 deletions(-) diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py index 8933c1ac..851372d5 100644 --- a/plexpy/activity_handler.py +++ b/plexpy/activity_handler.py @@ -110,11 +110,13 @@ class ActivityHandler(object): self.set_session_state() self.get_db_session() - def set_session_state(self): - self.ap.set_session_state(session_key=self.session_key, - state=self.state, - view_offset=self.view_offset, - stopped=helpers.timestamp()) + def set_session_state(self, view_offset=None): + self.ap.set_session_state( + session_key=self.session_key, + state=self.state, + view_offset=view_offset or self.view_offset, + stopped=helpers.timestamp() + ) def put_notification(self, notify_action, **kwargs): notification = {'stream_data': self.db_session.copy(), 'notify_action': notify_action} @@ -246,26 +248,34 @@ class ActivityHandler(object): self.put_notification('on_change') def on_intro(self, 
marker): - if self.get_live_session(): - logger.debug("Tautulli ActivityHandler :: Session %s reached intro marker." % str(self.session_key)) + logger.debug("Tautulli ActivityHandler :: Session %s reached intro marker." % str(self.session_key)) - self.put_notification('on_intro', marker=marker) + self.set_session_state(view_offset=marker['start_time_offset']) + + self.put_notification('on_intro', marker=marker) def on_commercial(self, marker): - if self.get_live_session(): - logger.debug("Tautulli ActivityHandler :: Session %s reached commercial marker." % str(self.session_key)) + logger.debug("Tautulli ActivityHandler :: Session %s reached commercial marker." % str(self.session_key)) - self.put_notification('on_commercial', marker=marker) + self.set_session_state(view_offset=marker['start_time_offset']) + + self.put_notification('on_commercial', marker=marker) def on_credits(self, marker): - if self.get_live_session(): - logger.debug("Tautulli ActivityHandler :: Session %s reached credits marker." % str(self.session_key)) + logger.debug("Tautulli ActivityHandler :: Session %s reached credits marker." % str(self.session_key)) - self.put_notification('on_credits', marker=marker) + self.set_session_state(view_offset=marker['start_time_offset']) - def on_watched(self): + self.put_notification('on_credits', marker=marker) + + def on_watched(self, marker=None): logger.debug("Tautulli ActivityHandler :: Session %s watched." 
% str(self.session_key)) + if marker: + self.set_session_state(view_offset=marker['start_time_offset']) + else: + self.update_db_session() + watched_notifiers = notification_handler.get_notify_state_enabled( session=self.db_session, notify_action='on_watched', notified=False) @@ -368,38 +378,58 @@ class ActivityHandler(object): if self.db_session['marker'] != marker_idx: self.ap.set_marker(session_key=self.session_key, marker_idx=marker_idx, marker_type=marker['type']) - callback_func = getattr(self, 'on_{}'.format(marker['type'])) if self.view_offset < marker['start_time_offset']: # Schedule a callback for the exact offset of the marker schedule_callback( 'session_key-{}-marker-{}'.format(self.session_key, marker_idx), - func=callback_func, + func=self._marker_callback, args=[marker], milliseconds=marker['start_time_offset'] - self.view_offset ) else: - callback_func(marker) + self._marker_callback(marker) break if not marker_flag: self.ap.set_marker(session_key=self.session_key, marker_idx=0) - def check_watched(self): - # Monitor if the stream has reached the watch percentage for notifications - if not self.db_session['watched'] and self.timeline['state'] != 'buffering': - progress_percent = helpers.get_percent(self.timeline['viewOffset'], self.db_session['duration']) - watched_percent = { - 'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT, - 'episode': plexpy.CONFIG.TV_WATCHED_PERCENT, - 'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT, - 'clip': plexpy.CONFIG.TV_WATCHED_PERCENT - } + def _marker_callback(self, marker): + if self.get_live_session(): + # Reset ActivityProcessor object for new database thread + self.ap = activity_processor.ActivityProcessor() - if progress_percent >= watched_percent.get(self.db_session['media_type'], 101): - self.ap.set_watched(session_key=self.session_key) - self.on_watched() + if marker['type'] == 'intro': + self.on_intro(marker) + elif marker['type'] == 'commercial': + self.on_commercial(marker) + elif marker['type'] == 'credits': + 
self.on_credits(marker) + + if not self.db_session['watched']: + if marker['final'] and plexpy.CONFIG.WATCHED_MARKER == 1: + self._marker_watched(marker) + elif marker['first'] and (plexpy.CONFIG.WATCHED_MARKER in (2, 3)): + self._marker_watched(marker) + + def _marker_watched(self, marker): + if not self.db_session['watched']: + self._watched_callback(marker) + + def check_watched(self): + if plexpy.CONFIG.WATCHED_MARKER == 1 or plexpy.CONFIG.WATCHED_MARKER == 2: + return + + # Monitor if the stream has reached the watch percentage for notifications + if not self.db_session['watched'] and self.state != 'buffering' and helpers.check_watched( + self.db_session['media_type'], self.view_offset, self.db_session['duration'] + ): + self._watched_callback() + + def _watched_callback(self, marker=None): + self.ap.set_watched(session_key=self.session_key) + self.on_watched(marker) class TimelineHandler(object): diff --git a/plexpy/helpers.py b/plexpy/helpers.py index b0995849..89b047fd 100644 --- a/plexpy/helpers.py +++ b/plexpy/helpers.py @@ -1733,3 +1733,43 @@ def short_season(title): if title.startswith('Season ') and title[7:].isdigit(): return 'S%s' % title[7:] return title + + +def get_first_final_marker(markers): + first = None + final = None + for marker in markers: + if marker['first']: + first = marker + if marker['final']: + final = marker + return first, final + + +def check_watched(media_type, view_offset, duration, marker_credits_first=None, marker_credits_final=None): + if isinstance(marker_credits_first, dict): + marker_credits_first = marker_credits_first['start_time_offset'] + if isinstance(marker_credits_final, dict): + marker_credits_final = marker_credits_final['start_time_offset'] + + view_offset = cast_to_int(view_offset) + duration = cast_to_int(duration) + + watched_percent = { + 'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT, + 'episode': plexpy.CONFIG.TV_WATCHED_PERCENT, + 'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT, + 'clip': 
plexpy.CONFIG.TV_WATCHED_PERCENT + } + threshold = watched_percent.get(media_type, 0) / 100 * duration + if not threshold: + return False + + if plexpy.CONFIG.WATCHED_MARKER == 1 and marker_credits_final: + return view_offset >= marker_credits_final + elif plexpy.CONFIG.WATCHED_MARKER == 2 and marker_credits_first: + return view_offset >= marker_credits_first + elif plexpy.CONFIG.WATCHED_MARKER == 3 and marker_credits_first: + return view_offset >= min(threshold, marker_credits_first) + else: + return view_offset >= threshold From c5005c1ea9ec1575abd36fa51dfc9cb1d4dd8159 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 20 Feb 2023 16:36:31 -0800 Subject: [PATCH 027/113] Group watched history sessions based on credits markers --- plexpy/activity_processor.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index d55c6738..71b6e3e0 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -327,7 +327,7 @@ class ActivityProcessor(object): # Get the last insert row id last_id = self.db.last_insert_id() new_session = prev_session = None - prev_progress_percent = media_watched_percent = 0 + watched = False if session['live']: # Check if we should group the session, select the last guid from the user @@ -369,12 +369,11 @@ class ActivityProcessor(object): 'view_offset': result[1]['view_offset'], 'reference_id': result[1]['reference_id']} - watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT, - 'episode': plexpy.CONFIG.TV_WATCHED_PERCENT, - 'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT - } - prev_progress_percent = helpers.get_percent(prev_session['view_offset'], session['duration']) - media_watched_percent = watched_percent.get(session['media_type'], 0) + marker_first, marker_final = helpers.get_first_final_marker(metadata['markers']) + watched = helpers.check_watched( + session['media_type'], 
session['view_offset'], session['duration'], + marker_first, marker_final + ) query = 'UPDATE session_history SET reference_id = ? WHERE id = ? ' @@ -384,8 +383,7 @@ class ActivityProcessor(object): # else set the reference_id to the new id if prev_session is None and new_session is None: args = [last_id, last_id] - elif prev_progress_percent < media_watched_percent and \ - prev_session['view_offset'] <= new_session['view_offset'] or \ + elif watched and prev_session['view_offset'] <= new_session['view_offset'] or \ session['live'] and prev_session['guid'] == new_session['guid']: args = [prev_session['reference_id'], new_session['id']] else: From 928e1d4b5edb2adec3049e38cccf3717463f65c2 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 20 Feb 2023 16:37:37 -0800 Subject: [PATCH 028/113] History table watched status based on credits markers --- plexpy/datafactory.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index cf55a2c0..e0e9fdee 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -99,8 +99,9 @@ class DataFactory(object): 'MIN(started) AS started', 'MAX(stopped) AS stopped', 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - \ - SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS duration', + SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration', 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter', + 'session_history.view_offset', 'session_history.user_id', 'session_history.user', '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ @@ -139,6 +140,9 @@ class DataFactory(object): 'MAX((CASE WHEN (view_offset IS NULL OR view_offset = "") THEN 0.1 ELSE view_offset * 1.0 END) / \ (CASE WHEN (session_history_metadata.duration IS NULL OR session_history_metadata.duration = "") \ THEN 1.0 
ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete', + 'session_history_metadata.duration', + 'session_history_metadata.marker_credits_first', + 'session_history_metadata.marker_credits_final', 'session_history_media_info.transcode_decision', 'COUNT(*) AS group_count', 'GROUP_CONCAT(session_history.id) AS group_ids', @@ -159,8 +163,9 @@ class DataFactory(object): 'started', 'stopped', 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE (strftime("%s", "now") - started) END) - \ - SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS duration', + SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration', 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter', + 'view_offset', 'user_id', 'user', '(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = "" \ @@ -198,6 +203,9 @@ class DataFactory(object): 'MAX((CASE WHEN (view_offset IS NULL OR view_offset = "") THEN 0.1 ELSE view_offset * 1.0 END) / \ (CASE WHEN (duration IS NULL OR duration = "") \ THEN 1.0 ELSE duration * 1.0 END) * 100) AS percent_complete', + 'duration', + 'NULL AS marker_credits_first', + 'NULL AS marker_credits_final', 'transcode_decision', 'NULL AS group_count', 'NULL AS group_ids', @@ -262,7 +270,7 @@ class DataFactory(object): item['user_thumb'] = users_lookup.get(item['user_id']) - filter_duration += int(item['duration']) + filter_duration += int(item['play_duration']) if item['media_type'] == 'episode' and item['parent_thumb']: thumb = item['parent_thumb'] @@ -274,7 +282,10 @@ class DataFactory(object): if item['live']: item['percent_complete'] = 100 - if item['percent_complete'] >= watched_percent[item['media_type']]: + if helpers.check_watched( + item['media_type'], item['view_offset'], item['duration'], + item['marker_credits_first'], item['marker_credits_final'] + ): watched_status = 1 elif item['percent_complete'] >= watched_percent[item['media_type']] / 2.0: watched_status 
= 0.5 @@ -297,7 +308,7 @@ class DataFactory(object): 'date': item['date'], 'started': item['started'], 'stopped': item['stopped'], - 'duration': item['duration'], + 'duration': item['play_duration'], 'paused_counter': item['paused_counter'], 'user_id': item['user_id'], 'user': item['user'], From 2a1bf7847b32f01059d990f564ce89777ed7751e Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 20 Feb 2023 18:35:55 -0800 Subject: [PATCH 029/113] Last watched statistics card based on credits markers --- plexpy/datafactory.py | 60 ++++++++++++++++++++++++++++++++++--------- 1 file changed, 48 insertions(+), 12 deletions(-) diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index e0e9fdee..e51b8a46 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -382,10 +382,6 @@ class DataFactory(object): if user_id: where_id += 'AND session_history.user_id = %s ' % user_id - movie_watched_percent = plexpy.CONFIG.MOVIE_WATCHED_PERCENT - tv_watched_percent = plexpy.CONFIG.TV_WATCHED_PERCENT - music_watched_percent = plexpy.CONFIG.MUSIC_WATCHED_PERCENT - group_by = 'session_history.reference_id' if grouping else 'session_history.id' sort_type = 'total_duration' if stats_type == 'duration' else 'total_plays' @@ -919,6 +915,43 @@ class DataFactory(object): 'rows': session.mask_session_info(top_platform, mask_metadata=False)}) elif stat == 'last_watched': + + movie_watched_percent = plexpy.CONFIG.MOVIE_WATCHED_PERCENT + tv_watched_percent = plexpy.CONFIG.TV_WATCHED_PERCENT + + if plexpy.CONFIG.WATCHED_MARKER == 1: + watched_threshold = ( + '(CASE WHEN shm.marker_credits_final IS NULL ' + 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' + 'ELSE shm.marker_credits_final END) ' + 'AS watched_threshold' + ) % (movie_watched_percent, tv_watched_percent) + watched_where = '_view_offset >= watched_threshold' + elif plexpy.CONFIG.WATCHED_MARKER == 2: + watched_threshold = ( + '(CASE WHEN 
shm.marker_credits_first IS NULL ' + 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' + 'ELSE shm.marker_credits_first END) ' + 'AS watched_threshold' + ) % (movie_watched_percent, tv_watched_percent) + watched_where = '_view_offset >= watched_threshold' + elif plexpy.CONFIG.WATCHED_MARKER == 3: + watched_threshold = ( + 'MIN(' + '(CASE WHEN shm.marker_credits_first IS NULL ' + 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' + 'ELSE shm.marker_credits_first END), ' + 'sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0) ' + 'AS watched_threshold' + ) % (movie_watched_percent, tv_watched_percent, movie_watched_percent, tv_watched_percent) + watched_where = '_view_offset >= watched_threshold' + else: + watched_threshold = 'NULL AS watched_threshold' + watched_where = ( + 'sh.media_type == "movie" AND percent_complete >= %d ' + 'OR sh.media_type == "episode" AND percent_complete >= %d' + ) % (movie_watched_percent, tv_watched_percent) + last_watched = [] try: query = 'SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ @@ -929,22 +962,25 @@ class DataFactory(object): '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ""' \ ' THEN u.username ELSE u.friendly_name END) ' \ ' AS friendly_name, ' \ - 'MAX(sh.started) AS last_watch, ' \ - '((CASE WHEN sh.view_offset IS NULL THEN 0.1 ELSE sh.view_offset * 1.0 END) / ' \ - ' (CASE WHEN shm.duration IS NULL THEN 1.0 ELSE shm.duration * 1.0 END) * 100) ' \ - ' AS percent_complete ' \ - 'FROM (SELECT *, MAX(id) FROM session_history ' \ + 'MAX(sh.started) AS last_watch, sh._view_offset, sh._duration, ' \ + '(sh._view_offset / sh._duration * 100) AS percent_complete, ' \ + '%s ' \ + 'FROM (SELECT *, MAX(session_history.id), ' \ + ' (CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) AS _view_offset, ' \ + ' (CASE WHEN duration IS NULL THEN 1.0 ELSE duration * 1.0 END) AS 
_duration ' \ + ' FROM session_history ' \ + ' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ ' WHERE session_history.stopped >= %s ' \ ' AND (session_history.media_type = "movie" ' \ ' OR session_history.media_type = "episode") %s ' \ ' GROUP BY %s) AS sh ' \ 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ 'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \ - 'WHERE sh.media_type == "movie" AND percent_complete >= %s ' \ - ' OR sh.media_type == "episode" AND percent_complete >= %s ' \ + 'WHERE %s ' \ 'GROUP BY sh.id ' \ 'ORDER BY last_watch DESC ' \ - 'LIMIT %s OFFSET %s' % (timestamp, where_id, group_by, movie_watched_percent, tv_watched_percent, + 'LIMIT %s OFFSET %s' % (watched_threshold, + timestamp, where_id, group_by, watched_where, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: From ebe570d42f9a2e70a7bdbb7db2cdcf8577fc70f5 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Tue, 21 Feb 2023 11:12:56 -0800 Subject: [PATCH 030/113] Allow setting a custom Pushover sound * Closes #2005 --- data/interfaces/default/css/tautulli.css | 2 - .../interfaces/default/newsletter_config.html | 4 ++ data/interfaces/default/notifier_config.html | 10 +++ plexpy/newsletters.py | 3 +- plexpy/notifiers.py | 69 ++++++++++--------- 5 files changed, 54 insertions(+), 34 deletions(-) diff --git a/data/interfaces/default/css/tautulli.css b/data/interfaces/default/css/tautulli.css index ac99ae76..5f1d90a0 100644 --- a/data/interfaces/default/css/tautulli.css +++ b/data/interfaces/default/css/tautulli.css @@ -79,7 +79,6 @@ select.form-control { color: #eee !important; border: 0px solid #444 !important; background: #555 !important; - padding: 1px 2px; transition: background-color .3s; } .selectize-control.form-control .selectize-input { @@ -87,7 +86,6 @@ select.form-control { align-items: center; flex-wrap: wrap; margin-bottom: 4px; - padding-left: 
5px; } .selectize-control.form-control.selectize-pms-ip .selectize-input { padding-left: 12px !important; diff --git a/data/interfaces/default/newsletter_config.html b/data/interfaces/default/newsletter_config.html index dc6de294..10583707 100644 --- a/data/interfaces/default/newsletter_config.html +++ b/data/interfaces/default/newsletter_config.html @@ -142,8 +142,10 @@

+ % if item['select_all']: + % endif % if isinstance(item['select_options'], dict): % for section, options in item['select_options'].items(): @@ -145,7 +147,9 @@ % endfor % else: + % if item['select_all']: + % endif % for option in sorted(item['select_options'], key=lambda x: x['text'].lower()): % endfor @@ -718,6 +722,12 @@ $('#pushover_priority').change( function () { pushoverPriority(); }); + + var $pushover_sound = $('#pushover_sound').selectize({ + create: true + }); + var pushover_sound = $pushover_sound[0].selectize; + pushover_sound.setValue(${json.dumps(next((c['value'] for c in notifier['config_options'] if c['name'] == 'pushover_sound'), [])) | n}); % elif notifier['agent_name'] == 'plexmobileapp': var $plexmobileapp_user_ids = $('#plexmobileapp_user_ids').selectize({ diff --git a/plexpy/newsletters.py b/plexpy/newsletters.py index 902eb69b..fdc1c5a8 100644 --- a/plexpy/newsletters.py +++ b/plexpy/newsletters.py @@ -971,7 +971,8 @@ class RecentlyAdded(Newsletter): 'description': 'Select the libraries to include in the newsletter.', 'name': 'newsletter_config_incl_libraries', 'input_type': 'selectize', - 'select_options': self._get_sections_options() + 'select_options': self._get_sections_options(), + 'select_all': True } ] diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index f0f02bf0..1a747be9 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -1435,21 +1435,24 @@ class EMAIL(Notifier): 'name': 'email_to', 'description': 'The email address(es) of the recipients.', 'input_type': 'selectize', - 'select_options': user_emails_to + 'select_options': user_emails_to, + 'select_all': True }, {'label': 'CC', 'value': self.config['cc'], 'name': 'email_cc', 'description': 'The email address(es) to CC.', 'input_type': 'selectize', - 'select_options': user_emails_cc + 'select_options': user_emails_cc, + 'select_all': True }, {'label': 'BCC', 'value': self.config['bcc'], 'name': 'email_bcc', 'description': 'The email address(es) to BCC.', 
'input_type': 'selectize', - 'select_options': user_emails_bcc + 'select_options': user_emails_bcc, + 'select_all': True }, {'label': 'SMTP Server', 'value': self.config['smtp_server'], @@ -3216,31 +3219,34 @@ class PUSHOVER(Notifier): return self.make_request('https://api.pushover.net/1/messages.json', headers=headers, data=data, files=files) def get_sounds(self): - sounds = { - '': '', - 'alien': 'Alien Alarm (long)', - 'bike': 'Bike', - 'bugle': 'Bugle', - 'cashregister': 'Cash Register', - 'classical': 'Classical', - 'climb': 'Climb (long)', - 'cosmic': 'Cosmic', - 'echo': 'Pushover Echo (long)', - 'falling': 'Falling', - 'gamelan': 'Gamelan', - 'incoming': 'Incoming', - 'intermission': 'Intermission', - 'magic': 'Magic', - 'mechanical': 'Mechanical', - 'none': 'None (silent)', - 'persistent': 'Persistent (long)', - 'pianobar': 'Piano Bar', - 'pushover': 'Pushover (default)', - 'siren': 'Siren', - 'spacealarm': 'Space Alarm', - 'tugboat': 'Tug Boat', - 'updown': 'Up Down (long)' - } + sounds = [ + {'value': '', 'text': ''}, + {'value': 'alien', 'text': 'Alien Alarm (long)'}, + {'value': 'bike', 'text': 'Bike'}, + {'value': 'bugle', 'text': 'Bugle'}, + {'value': 'cashregister', 'text': 'Cash Register'}, + {'value': 'classical', 'text': 'Classical'}, + {'value': 'climb', 'text': 'Climb (long)'}, + {'value': 'cosmic', 'text': 'Cosmic'}, + {'value': 'echo', 'text': 'Pushover Echo (long)'}, + {'value': 'falling', 'text': 'Falling'}, + {'value': 'gamelan', 'text': 'Gamelan'}, + {'value': 'incoming', 'text': 'Incoming'}, + {'value': 'intermission', 'text': 'Intermission'}, + {'value': 'magic', 'text': 'Magic'}, + {'value': 'mechanical', 'text': 'Mechanical'}, + {'value': 'none', 'text': 'None (silent)'}, + {'value': 'persistent', 'text': 'Persistent (long)'}, + {'value': 'pianobar', 'text': 'Piano Bar'}, + {'value': 'pushover', 'text': 'Pushover (default)'}, + {'value': 'siren', 'text': 'Siren'}, + {'value': 'spacealarm', 'text': 'Space Alarm'}, + {'value': 'tugboat', 
'text': 'Tug Boat'}, + {'value': 'updown', 'text': 'Up Down (long)'}, + {'value': 'vibrate', 'text': 'Vibrate Only'}, + ] + if self.config['sound'] not in [s['value'] for s in sounds]: + sounds.append({'value': self.config['sound'], 'text': self.config['sound']}) return sounds @@ -3281,9 +3287,10 @@ class PUSHOVER(Notifier): {'label': 'Sound', 'value': self.config['sound'], 'name': 'pushover_sound', - 'description': 'Set the notification sound. Leave blank for the default sound.', - 'input_type': 'select', - 'select_options': self.get_sounds() + 'description': 'Select a notification sound or enter a custom sound name. Leave blank for the default sound.', + 'input_type': 'selectize', + 'select_options': self.get_sounds(), + 'select_all': False }, {'label': 'Priority', 'value': self.config['priority'], From e6d1712afdec09630c1a141afc47a826175d51b9 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 25 Feb 2023 16:35:48 -0800 Subject: [PATCH 031/113] Fix styling on collection info pages --- data/interfaces/default/info.html | 12 ++--- .../default/info_children_list.html | 48 ++++++++++++++++++- .../default/js/tables/collections_table.js | 7 ++- plexpy/pmsconnect.py | 2 +- 4 files changed, 60 insertions(+), 9 deletions(-) diff --git a/data/interfaces/default/info.html b/data/interfaces/default/info.html index a7acb11b..05bccf52 100644 --- a/data/interfaces/default/info.html +++ b/data/interfaces/default/info.html @@ -213,7 +213,7 @@ DOCUMENTATION :: END % if _session['user_group'] == 'admin': % endif - % elif data['media_type'] in ('artist', 'album', 'track', 'playlist', 'photo_album', 'photo', 'clip'): + % elif data['media_type'] in ('artist', 'album', 'track', 'playlist', 'photo_album', 'photo', 'clip') or data['sub_media_type'] in ('artist', 'album', 'track'):
@@ -283,14 +283,14 @@ DOCUMENTATION :: END padding_height = '' if data['media_type'] == 'movie' or data['live']: padding_height = 'height: 305px;' - elif data['media_type'] in ('show', 'season', 'collection'): - padding_height = 'height: 270px;' - elif data['media_type'] == 'episode': - padding_height = 'height: 70px;' - elif data['media_type'] in ('artist', 'album', 'playlist', 'photo_album', 'photo'): + elif data['media_type'] in ('artist', 'album', 'playlist', 'photo_album', 'photo') or data['sub_media_type'] in ('artist', 'album', 'track'): padding_height = 'height: 150px;' elif data['media_type'] in ('track', 'clip'): padding_height = 'height: 180px;' + elif data['media_type'] == 'episode': + padding_height = 'height: 70px;' + elif data['media_type'] in ('show', 'season', 'collection'): + padding_height = 'height: 270px;' %> +
+

+ ${child['title']} +

+ % if media_type == 'collection': +

+ ${child['parent_title']} +

+ % endif +
% elif child['media_type'] == 'episode': + % elif child['media_type'] == 'artist': + +
+
+ % if _session['user_group'] == 'admin': + + % endif +
+
+ % elif child['media_type'] == 'album': @@ -193,6 +226,11 @@ DOCUMENTATION :: END

${child['title']}

+ % if media_type == 'collection': +

+ ${child['parent_title']} +

+ % endif
% elif child['media_type'] == 'track': <% e = 'even' if loop.index % 2 == 0 else 'odd' %> @@ -205,7 +243,15 @@ DOCUMENTATION :: END ${child['title']} - % if child['original_title']: + % if media_type == 'collection': + - + + + ${child['grandparent_title']} + + + (${child['parent_title']}) + % elif child['original_title']: - ${child['original_title']} % endif diff --git a/data/interfaces/default/js/tables/collections_table.js b/data/interfaces/default/js/tables/collections_table.js index b768c2d3..91eb68ab 100644 --- a/data/interfaces/default/js/tables/collections_table.js +++ b/data/interfaces/default/js/tables/collections_table.js @@ -32,7 +32,12 @@ collections_table_options = { if (rowData['smart']) { smart = ' ' } - var thumb_popover = '' + rowData['title'] + ''; + console.log(rowData['subtype']) + if (rowData['subtype'] === 'artist' || rowData['subtype'] === 'album' || rowData['subtype'] === 'track') { + var thumb_popover = '' + rowData['title'] + ''; + } else { + var thumb_popover = '' + rowData['title'] + ''; + } $(td).html(smart + '' + thumb_popover + ''); } }, diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index 347de513..1d161a04 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -2545,7 +2545,7 @@ class PmsConnect(object): children_list.append(children_output) output = {'children_count': helpers.cast_to_int(helpers.get_xml_attr(xml_head[0], 'size')), - 'children_type': helpers.get_xml_attr(xml_head[0], 'viewGroup'), + 'children_type': helpers.get_xml_attr(xml_head[0], 'viewGroup') or (children_list[0]['media_type'] if children_list else ''), 'title': helpers.get_xml_attr(xml_head[0], 'title2'), 'children_list': children_list } From ecb6d8b74329ea07ca651a4b83efc9a024c3cd0a Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sat, 25 Feb 2023 16:43:14 -0800 Subject: [PATCH 032/113] Move track artist to details tag on info page --- data/interfaces/default/info.html | 9 +++++++-- 1 file 
changed, 7 insertions(+), 2 deletions(-) diff --git a/data/interfaces/default/info.html b/data/interfaces/default/info.html index 05bccf52..dbce734b 100644 --- a/data/interfaces/default/info.html +++ b/data/interfaces/default/info.html @@ -267,7 +267,7 @@ DOCUMENTATION :: END

${data['parent_title']}

${data['title']}

% elif data['media_type'] == 'track': -

${data['original_title'] or data['grandparent_title']}

+

${data['grandparent_title']}

${data['parent_title']} - ${data['title']}

% elif data['media_type'] in ('photo', 'clip'): @@ -369,6 +369,11 @@ DOCUMENTATION :: END Studio ${data['studio']} % endif
+
+ % if data['media_type'] == 'track' and data['original_title']: + Track Artists ${data['original_title']} + % endif +
% if data['media_type'] == 'movie': Year ${data['year']} @@ -812,7 +817,7 @@ DOCUMENTATION :: END % elif data['media_type'] == 'album': ${data['parent_title']}
${data['title']} % elif data['media_type'] == 'track': - ${data['original_title'] or data['grandparent_title']}
${data['title']}
${data['parent_title']} + ${data['grandparent_title']}
${data['title']}
${data['parent_title']} % endif

From 42eeb90532289f30a19ee4bdb1c79ffc0e4b2cf4 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 26 Feb 2023 15:09:12 -0800 Subject: [PATCH 033/113] Add ga4mp library * Remove UniversalAnalytics --- lib/UniversalAnalytics/HTTPLog.py | 121 -------- lib/UniversalAnalytics/Tracker.py | 424 ----------------------------- lib/UniversalAnalytics/__init__.py | 1 - lib/ga4mp/__init__.py | 3 + lib/ga4mp/event.py | 44 +++ lib/ga4mp/ga4mp.py | 416 ++++++++++++++++++++++++++++ lib/ga4mp/item.py | 11 + lib/ga4mp/store.py | 116 ++++++++ lib/ga4mp/utils.py | 392 ++++++++++++++++++++++++++ requirements.txt | 1 + 10 files changed, 983 insertions(+), 546 deletions(-) delete mode 100644 lib/UniversalAnalytics/HTTPLog.py delete mode 100644 lib/UniversalAnalytics/Tracker.py delete mode 100644 lib/UniversalAnalytics/__init__.py create mode 100644 lib/ga4mp/__init__.py create mode 100644 lib/ga4mp/event.py create mode 100644 lib/ga4mp/ga4mp.py create mode 100644 lib/ga4mp/item.py create mode 100644 lib/ga4mp/store.py create mode 100644 lib/ga4mp/utils.py diff --git a/lib/UniversalAnalytics/HTTPLog.py b/lib/UniversalAnalytics/HTTPLog.py deleted file mode 100644 index a8fc906d..00000000 --- a/lib/UniversalAnalytics/HTTPLog.py +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/python -############################################################################### -# Formatting filter for urllib2's HTTPHandler(debuglevel=1) output -# Copyright (c) 2013, Analytics Pros -# -# This project is free software, distributed under the BSD license. -# Analytics Pros offers consulting and integration services if your firm needs -# assistance in strategy, implementation, or auditing existing work. -############################################################################### - - -import sys, re, os -from io import StringIO - - - -class BufferTranslator(object): - """ Provides a buffer-compatible interface for filtering buffer content. 
- """ - parsers = [] - - def __init__(self, output): - self.output = output - self.encoding = getattr(output, 'encoding', None) - - def write(self, content): - content = self.translate(content) - self.output.write(content) - - - @staticmethod - def stripslashes(content): - return content.decode('string_escape') - - @staticmethod - def addslashes(content): - return content.encode('string_escape') - - def translate(self, line): - for pattern, method in self.parsers: - match = pattern.match(line) - if match: - return method(match) - - return line - - - -class LineBufferTranslator(BufferTranslator): - """ Line buffer implementation supports translation of line-format input - even when input is not already line-buffered. Caches input until newlines - occur, and then dispatches translated input to output buffer. - """ - def __init__(self, *a, **kw): - self._linepending = [] - super(LineBufferTranslator, self).__init__(*a, **kw) - - def write(self, _input): - lines = _input.splitlines(True) - for i in range(0, len(lines)): - last = i - if lines[i].endswith('\n'): - prefix = len(self._linepending) and ''.join(self._linepending) or '' - self.output.write(self.translate(prefix + lines[i])) - del self._linepending[0:] - last = -1 - - if last >= 0: - self._linepending.append(lines[ last ]) - - - def __del__(self): - if len(self._linepending): - self.output.write(self.translate(''.join(self._linepending))) - - -class HTTPTranslator(LineBufferTranslator): - """ Translates output from |urllib2| HTTPHandler(debuglevel = 1) into - HTTP-compatible, readible text structures for human analysis. 
- """ - - RE_LINE_PARSER = re.compile(r'^(?:([a-z]+):)\s*(\'?)([^\r\n]*)\2(?:[\r\n]*)$') - RE_LINE_BREAK = re.compile(r'(\r?\n|(?:\\r)?\\n)') - RE_HTTP_METHOD = re.compile(r'^(POST|GET|HEAD|DELETE|PUT|TRACE|OPTIONS)') - RE_PARAMETER_SPACER = re.compile(r'&([a-z0-9]+)=') - - @classmethod - def spacer(cls, line): - return cls.RE_PARAMETER_SPACER.sub(r' &\1= ', line) - - def translate(self, line): - - parsed = self.RE_LINE_PARSER.match(line) - - if parsed: - value = parsed.group(3) - stage = parsed.group(1) - - if stage == 'send': # query string is rendered here - return '\n# HTTP Request:\n' + self.stripslashes(value) - elif stage == 'reply': - return '\n\n# HTTP Response:\n' + self.stripslashes(value) - elif stage == 'header': - return value + '\n' - else: - return value - - - return line - - -def consume(outbuffer = None): # Capture standard output - sys.stdout = HTTPTranslator(outbuffer or sys.stdout) - return sys.stdout - - -if __name__ == '__main__': - consume(sys.stdout).write(sys.stdin.read()) - print('\n') - -# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4 diff --git a/lib/UniversalAnalytics/Tracker.py b/lib/UniversalAnalytics/Tracker.py deleted file mode 100644 index b7d9476e..00000000 --- a/lib/UniversalAnalytics/Tracker.py +++ /dev/null @@ -1,424 +0,0 @@ -from future.moves.urllib.request import urlopen, build_opener, install_opener -from future.moves.urllib.request import Request, HTTPSHandler -from future.moves.urllib.error import URLError, HTTPError -from future.moves.urllib.parse import urlencode - -import random -import datetime -import time -import uuid -import hashlib -import socket - - -def generate_uuid(basedata=None): - """ Provides a _random_ UUID with no input, or a UUID4-format MD5 checksum of any input data provided """ - if basedata is None: - return str(uuid.uuid4()) - elif isinstance(basedata, str): - checksum = hashlib.md5(str(basedata).encode('utf-8')).hexdigest() 
- return '%8s-%4s-%4s-%4s-%12s' % ( - checksum[0:8], checksum[8:12], checksum[12:16], checksum[16:20], checksum[20:32]) - - -class Time(datetime.datetime): - """ Wrappers and convenience methods for processing various time representations """ - - @classmethod - def from_unix(cls, seconds, milliseconds=0): - """ Produce a full |datetime.datetime| object from a Unix timestamp """ - base = list(time.gmtime(seconds))[0:6] - base.append(milliseconds * 1000) # microseconds - return cls(*base) - - @classmethod - def to_unix(cls, timestamp): - """ Wrapper over time module to produce Unix epoch time as a float """ - if not isinstance(timestamp, datetime.datetime): - raise TypeError('Time.milliseconds expects a datetime object') - base = time.mktime(timestamp.timetuple()) - return base - - @classmethod - def milliseconds_offset(cls, timestamp, now=None): - """ Offset time (in milliseconds) from a |datetime.datetime| object to now """ - if isinstance(timestamp, (int, float)): - base = timestamp - else: - base = cls.to_unix(timestamp) - base = base + (timestamp.microsecond / 1000000) - if now is None: - now = time.time() - return (now - base) * 1000 - - -class HTTPRequest(object): - """ URL Construction and request handling abstraction. - This is not intended to be used outside this module. - - Automates mapping of persistent state (i.e. query parameters) - onto transcient datasets for each query. - """ - - endpoint = 'https://www.google-analytics.com/collect' - - @staticmethod - def debug(): - """ Activate debugging on urllib2 """ - handler = HTTPSHandler(debuglevel=1) - opener = build_opener(handler) - install_opener(opener) - - # Store properties for all requests - def __init__(self, user_agent=None, *args, **opts): - self.user_agent = user_agent or 'Analytics Pros - Universal Analytics (Python)' - - @classmethod - def fixUTF8(cls, data): # Ensure proper encoding for UA's servers... 
- """ Convert all strings to UTF-8 """ - for key in data: - if isinstance(data[key], str): - data[key] = data[key].encode('utf-8') - return data - - # Apply stored properties to the given dataset & POST to the configured endpoint - def send(self, data): - request = Request( - self.endpoint + '?' + urlencode(self.fixUTF8(data)).encode('utf-8'), - headers={ - 'User-Agent': self.user_agent - } - ) - self.open(request) - - def open(self, request): - try: - return urlopen(request) - except HTTPError as e: - return False - except URLError as e: - self.cache_request(request) - return False - - def cache_request(self, request): - # TODO: implement a proper caching mechanism here for re-transmitting hits - # record = (Time.now(), request.get_full_url(), request.get_data(), request.headers) - pass - - -class HTTPPost(HTTPRequest): - - # Apply stored properties to the given dataset & POST to the configured endpoint - def send(self, data): - request = Request( - self.endpoint, - data=urlencode(self.fixUTF8(data)).encode('utf-8'), - headers={ - 'User-Agent': self.user_agent - } - ) - self.open(request) - - -class Tracker(object): - """ Primary tracking interface for Universal Analytics """ - params = None - parameter_alias = {} - valid_hittypes = ('pageview', 'event', 'social', 'screenview', 'transaction', 'item', 'exception', 'timing') - - @classmethod - def alias(cls, typemap, base, *names): - """ Declare an alternate (humane) name for a measurement protocol parameter """ - cls.parameter_alias[base] = (typemap, base) - for i in names: - cls.parameter_alias[i] = (typemap, base) - - @classmethod - def coerceParameter(cls, name, value=None): - if isinstance(name, str) and name[0] == '&': - return name[1:], str(value) - elif name in cls.parameter_alias: - typecast, param_name = cls.parameter_alias.get(name) - return param_name, typecast(value) - else: - raise KeyError('Parameter "{0}" is not recognized'.format(name)) - - def payload(self, data): - for key, value in data.items(): 
- try: - yield self.coerceParameter(key, value) - except KeyError: - continue - - option_sequence = { - 'pageview': [(str, 'dp')], - 'event': [(str, 'ec'), (str, 'ea'), (str, 'el'), (int, 'ev')], - 'social': [(str, 'sn'), (str, 'sa'), (str, 'st')], - 'timing': [(str, 'utc'), (str, 'utv'), (str, 'utt'), (str, 'utl')] - } - - @classmethod - def consume_options(cls, data, hittype, args): - """ Interpret sequential arguments related to known hittypes based on declared structures """ - opt_position = 0 - data['t'] = hittype # integrate hit type parameter - if hittype in cls.option_sequence: - for expected_type, optname in cls.option_sequence[hittype]: - if opt_position < len(args) and isinstance(args[opt_position], expected_type): - data[optname] = args[opt_position] - opt_position += 1 - - @classmethod - def hittime(cls, timestamp=None, age=None, milliseconds=None): - """ Returns an integer represeting the milliseconds offset for a given hit (relative to now) """ - if isinstance(timestamp, (int, float)): - return int(Time.milliseconds_offset(Time.from_unix(timestamp, milliseconds=milliseconds))) - if isinstance(timestamp, datetime.datetime): - return int(Time.milliseconds_offset(timestamp)) - if isinstance(age, (int, float)): - return int(age * 1000) + (milliseconds or 0) - - @property - def account(self): - return self.params.get('tid', None) - - def __init__(self, account, name=None, client_id=None, hash_client_id=False, user_id=None, user_agent=None, - use_post=True): - - if use_post is False: - self.http = HTTPRequest(user_agent=user_agent) - else: - self.http = HTTPPost(user_agent=user_agent) - - self.params = {'v': 1, 'tid': account} - - if client_id is None: - client_id = generate_uuid() - - self.params['cid'] = client_id - - self.hash_client_id = hash_client_id - - if user_id is not None: - self.params['uid'] = user_id - - def set_timestamp(self, data): - """ Interpret time-related options, apply queue-time parameter as needed """ - if 'hittime' in data: # an 
absolute timestamp - data['qt'] = self.hittime(timestamp=data.pop('hittime', None)) - if 'hitage' in data: # a relative age (in seconds) - data['qt'] = self.hittime(age=data.pop('hitage', None)) - - def send(self, hittype, *args, **data): - """ Transmit HTTP requests to Google Analytics using the measurement protocol """ - - if hittype not in self.valid_hittypes: - raise KeyError('Unsupported Universal Analytics Hit Type: {0}'.format(repr(hittype))) - - self.set_timestamp(data) - self.consume_options(data, hittype, args) - - for item in args: # process dictionary-object arguments of transcient data - if isinstance(item, dict): - for key, val in self.payload(item): - data[key] = val - - for k, v in self.params.items(): # update only absent parameters - if k not in data: - data[k] = v - - data = dict(self.payload(data)) - - if self.hash_client_id: - data['cid'] = generate_uuid(data['cid']) - - # Transmit the hit to Google... - self.http.send(data) - - # Setting persistent attibutes of the session/hit/etc (inc. 
custom dimensions/metrics) - def set(self, name, value=None): - if isinstance(name, dict): - for key, value in name.items(): - try: - param, value = self.coerceParameter(key, value) - self.params[param] = value - except KeyError: - pass - elif isinstance(name, str): - try: - param, value = self.coerceParameter(name, value) - self.params[param] = value - except KeyError: - pass - - def __getitem__(self, name): - param, value = self.coerceParameter(name, None) - return self.params.get(param, None) - - def __setitem__(self, name, value): - param, value = self.coerceParameter(name, value) - self.params[param] = value - - def __delitem__(self, name): - param, value = self.coerceParameter(name, None) - if param in self.params: - del self.params[param] - - -def safe_unicode(obj): - """ Safe convertion to the Unicode string version of the object """ - try: - return str(obj) - except UnicodeDecodeError: - return obj.decode('utf-8') - - -# Declaring name mappings for Measurement Protocol parameters -MAX_CUSTOM_DEFINITIONS = 200 -MAX_EC_LISTS = 11 # 1-based index -MAX_EC_PRODUCTS = 11 # 1-based index -MAX_EC_PROMOTIONS = 11 # 1-based index - -Tracker.alias(int, 'v', 'protocol-version') -Tracker.alias(safe_unicode, 'cid', 'client-id', 'clientId', 'clientid') -Tracker.alias(safe_unicode, 'tid', 'trackingId', 'account') -Tracker.alias(safe_unicode, 'uid', 'user-id', 'userId', 'userid') -Tracker.alias(safe_unicode, 'uip', 'user-ip', 'userIp', 'ipaddr') -Tracker.alias(safe_unicode, 'ua', 'userAgent', 'userAgentOverride', 'user-agent') -Tracker.alias(safe_unicode, 'dp', 'page', 'path') -Tracker.alias(safe_unicode, 'dt', 'title', 'pagetitle', 'pageTitle' 'page-title') -Tracker.alias(safe_unicode, 'dl', 'location') -Tracker.alias(safe_unicode, 'dh', 'hostname') -Tracker.alias(safe_unicode, 'sc', 'sessioncontrol', 'session-control', 'sessionControl') -Tracker.alias(safe_unicode, 'dr', 'referrer', 'referer') -Tracker.alias(int, 'qt', 'queueTime', 'queue-time') 
-Tracker.alias(safe_unicode, 't', 'hitType', 'hittype') -Tracker.alias(int, 'aip', 'anonymizeIp', 'anonIp', 'anonymize-ip') -Tracker.alias(safe_unicode, 'ds', 'dataSource', 'data-source') - -# Campaign attribution -Tracker.alias(safe_unicode, 'cn', 'campaign', 'campaignName', 'campaign-name') -Tracker.alias(safe_unicode, 'cs', 'source', 'campaignSource', 'campaign-source') -Tracker.alias(safe_unicode, 'cm', 'medium', 'campaignMedium', 'campaign-medium') -Tracker.alias(safe_unicode, 'ck', 'keyword', 'campaignKeyword', 'campaign-keyword') -Tracker.alias(safe_unicode, 'cc', 'content', 'campaignContent', 'campaign-content') -Tracker.alias(safe_unicode, 'ci', 'campaignId', 'campaignID', 'campaign-id') - -# Technical specs -Tracker.alias(safe_unicode, 'sr', 'screenResolution', 'screen-resolution', 'resolution') -Tracker.alias(safe_unicode, 'vp', 'viewport', 'viewportSize', 'viewport-size') -Tracker.alias(safe_unicode, 'de', 'encoding', 'documentEncoding', 'document-encoding') -Tracker.alias(int, 'sd', 'colors', 'screenColors', 'screen-colors') -Tracker.alias(safe_unicode, 'ul', 'language', 'user-language', 'userLanguage') - -# Mobile app -Tracker.alias(safe_unicode, 'an', 'appName', 'app-name', 'app') -Tracker.alias(safe_unicode, 'cd', 'contentDescription', 'screenName', 'screen-name', 'content-description') -Tracker.alias(safe_unicode, 'av', 'appVersion', 'app-version', 'version') -Tracker.alias(safe_unicode, 'aid', 'appID', 'appId', 'application-id', 'app-id', 'applicationId') -Tracker.alias(safe_unicode, 'aiid', 'appInstallerId', 'app-installer-id') - -# Ecommerce -Tracker.alias(safe_unicode, 'ta', 'affiliation', 'transactionAffiliation', 'transaction-affiliation') -Tracker.alias(safe_unicode, 'ti', 'transaction', 'transactionId', 'transaction-id') -Tracker.alias(float, 'tr', 'revenue', 'transactionRevenue', 'transaction-revenue') -Tracker.alias(float, 'ts', 'shipping', 'transactionShipping', 'transaction-shipping') -Tracker.alias(float, 'tt', 'tax', 'transactionTax', 
'transaction-tax') -Tracker.alias(safe_unicode, 'cu', 'currency', 'transactionCurrency', - 'transaction-currency') # Currency code, e.g. USD, EUR -Tracker.alias(safe_unicode, 'in', 'item-name', 'itemName') -Tracker.alias(float, 'ip', 'item-price', 'itemPrice') -Tracker.alias(float, 'iq', 'item-quantity', 'itemQuantity') -Tracker.alias(safe_unicode, 'ic', 'item-code', 'sku', 'itemCode') -Tracker.alias(safe_unicode, 'iv', 'item-variation', 'item-category', 'itemCategory', 'itemVariation') - -# Events -Tracker.alias(safe_unicode, 'ec', 'event-category', 'eventCategory', 'category') -Tracker.alias(safe_unicode, 'ea', 'event-action', 'eventAction', 'action') -Tracker.alias(safe_unicode, 'el', 'event-label', 'eventLabel', 'label') -Tracker.alias(int, 'ev', 'event-value', 'eventValue', 'value') -Tracker.alias(int, 'ni', 'noninteractive', 'nonInteractive', 'noninteraction', 'nonInteraction') - -# Social -Tracker.alias(safe_unicode, 'sa', 'social-action', 'socialAction') -Tracker.alias(safe_unicode, 'sn', 'social-network', 'socialNetwork') -Tracker.alias(safe_unicode, 'st', 'social-target', 'socialTarget') - -# Exceptions -Tracker.alias(safe_unicode, 'exd', 'exception-description', 'exceptionDescription', 'exDescription') -Tracker.alias(int, 'exf', 'exception-fatal', 'exceptionFatal', 'exFatal') - -# User Timing -Tracker.alias(safe_unicode, 'utc', 'timingCategory', 'timing-category') -Tracker.alias(safe_unicode, 'utv', 'timingVariable', 'timing-variable') -Tracker.alias(float, 'utt', 'time', 'timingTime', 'timing-time') -Tracker.alias(safe_unicode, 'utl', 'timingLabel', 'timing-label') -Tracker.alias(float, 'dns', 'timingDNS', 'timing-dns') -Tracker.alias(float, 'pdt', 'timingPageLoad', 'timing-page-load') -Tracker.alias(float, 'rrt', 'timingRedirect', 'timing-redirect') -Tracker.alias(safe_unicode, 'tcp', 'timingTCPConnect', 'timing-tcp-connect') -Tracker.alias(safe_unicode, 'srt', 'timingServerResponse', 'timing-server-response') - -# Custom dimensions and metrics -for i 
in range(0, 200): - Tracker.alias(safe_unicode, 'cd{0}'.format(i), 'dimension{0}'.format(i)) - Tracker.alias(int, 'cm{0}'.format(i), 'metric{0}'.format(i)) - -# Content groups -for i in range(0, 5): - Tracker.alias(safe_unicode, 'cg{0}'.format(i), 'contentGroup{0}'.format(i)) - -# Enhanced Ecommerce -Tracker.alias(str, 'pa') # Product action -Tracker.alias(str, 'tcc') # Coupon code -Tracker.alias(str, 'pal') # Product action list -Tracker.alias(int, 'cos') # Checkout step -Tracker.alias(str, 'col') # Checkout step option - -Tracker.alias(str, 'promoa') # Promotion action - -for product_index in range(1, MAX_EC_PRODUCTS): - Tracker.alias(str, 'pr{0}id'.format(product_index)) # Product SKU - Tracker.alias(str, 'pr{0}nm'.format(product_index)) # Product name - Tracker.alias(str, 'pr{0}br'.format(product_index)) # Product brand - Tracker.alias(str, 'pr{0}ca'.format(product_index)) # Product category - Tracker.alias(str, 'pr{0}va'.format(product_index)) # Product variant - Tracker.alias(str, 'pr{0}pr'.format(product_index)) # Product price - Tracker.alias(int, 'pr{0}qt'.format(product_index)) # Product quantity - Tracker.alias(str, 'pr{0}cc'.format(product_index)) # Product coupon code - Tracker.alias(int, 'pr{0}ps'.format(product_index)) # Product position - - for custom_index in range(MAX_CUSTOM_DEFINITIONS): - Tracker.alias(str, 'pr{0}cd{1}'.format(product_index, custom_index)) # Product custom dimension - Tracker.alias(int, 'pr{0}cm{1}'.format(product_index, custom_index)) # Product custom metric - - for list_index in range(1, MAX_EC_LISTS): - Tracker.alias(str, 'il{0}pi{1}id'.format(list_index, product_index)) # Product impression SKU - Tracker.alias(str, 'il{0}pi{1}nm'.format(list_index, product_index)) # Product impression name - Tracker.alias(str, 'il{0}pi{1}br'.format(list_index, product_index)) # Product impression brand - Tracker.alias(str, 'il{0}pi{1}ca'.format(list_index, product_index)) # Product impression category - Tracker.alias(str, 
'il{0}pi{1}va'.format(list_index, product_index)) # Product impression variant - Tracker.alias(int, 'il{0}pi{1}ps'.format(list_index, product_index)) # Product impression position - Tracker.alias(int, 'il{0}pi{1}pr'.format(list_index, product_index)) # Product impression price - - for custom_index in range(MAX_CUSTOM_DEFINITIONS): - Tracker.alias(str, 'il{0}pi{1}cd{2}'.format(list_index, product_index, - custom_index)) # Product impression custom dimension - Tracker.alias(int, 'il{0}pi{1}cm{2}'.format(list_index, product_index, - custom_index)) # Product impression custom metric - -for list_index in range(1, MAX_EC_LISTS): - Tracker.alias(str, 'il{0}nm'.format(list_index)) # Product impression list name - -for promotion_index in range(1, MAX_EC_PROMOTIONS): - Tracker.alias(str, 'promo{0}id'.format(promotion_index)) # Promotion ID - Tracker.alias(str, 'promo{0}nm'.format(promotion_index)) # Promotion name - Tracker.alias(str, 'promo{0}cr'.format(promotion_index)) # Promotion creative - Tracker.alias(str, 'promo{0}ps'.format(promotion_index)) # Promotion position - - -# Shortcut for creating trackers -def create(account, *args, **kwargs): - return Tracker(account, *args, **kwargs) - -# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4 diff --git a/lib/UniversalAnalytics/__init__.py b/lib/UniversalAnalytics/__init__.py deleted file mode 100644 index 0d8817d6..00000000 --- a/lib/UniversalAnalytics/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from . 
import Tracker \ No newline at end of file diff --git a/lib/ga4mp/__init__.py b/lib/ga4mp/__init__.py new file mode 100644 index 00000000..9a817b94 --- /dev/null +++ b/lib/ga4mp/__init__.py @@ -0,0 +1,3 @@ +from ga4mp.ga4mp import GtagMP, FirebaseMP + +__all__ = ['GtagMP','FirebaseMP'] \ No newline at end of file diff --git a/lib/ga4mp/event.py b/lib/ga4mp/event.py new file mode 100644 index 00000000..12f65a10 --- /dev/null +++ b/lib/ga4mp/event.py @@ -0,0 +1,44 @@ +from ga4mp.item import Item + +class Event(dict): + def __init__(self, name): + self.set_event_name(name) + + def set_event_name(self, name): + if len(name) > 40: + raise ValueError("Event name cannot exceed 40 characters.") + self["name"] = name + + def get_event_name(self): + return self.get("name") + + def set_event_param(self, name, value): + # Series of checks to comply with GA4 event collection limits: https://support.google.com/analytics/answer/9267744 + if len(name) > 40: + raise ValueError("Event parameter name cannot exceed 40 characters.") + if name in ["page_location", "page_referrer", "page_title"] and len(str(value)) > 300: + raise ValueError("Event parameter value for page info cannot exceed 300 characters.") + if name not in ["page_location", "page_referrer", "page_title"] and len(str(value)) > 100: + raise ValueError("Event parameter value cannot exceed 100 characters.") + if "params" not in self.keys(): + self["params"] = {} + if len(self["params"]) >= 100: + raise RuntimeError("Event cannot contain more than 100 parameters.") + self["params"][name] = value + + def get_event_params(self): + return self.get("params") + + def delete_event_param(self, name): + # Since only 25 event parameters are allowed, this will allow the user to delete a parameter if necessary. 
+ self["params"].pop(name, None) + + def create_new_item(self, item_id=None, item_name=None): + return Item(item_id=item_id, item_name=item_name) + + def add_item_to_event(self, item): + if not isinstance(item, dict): + raise ValueError("'item' must be an instance of a dictionary.") + if "items" not in self["params"].keys(): + self.set_event_param("items", []) + self["params"]["items"].append(item) \ No newline at end of file diff --git a/lib/ga4mp/ga4mp.py b/lib/ga4mp/ga4mp.py new file mode 100644 index 00000000..45fdc842 --- /dev/null +++ b/lib/ga4mp/ga4mp.py @@ -0,0 +1,416 @@ +############################################################################### +# Google Analytics 4 Measurement Protocol for Python +# Copyright (c) 2022, Adswerve +# +# This project is free software, distributed under the BSD license. +# Adswerve offers consulting and integration services if your firm needs +# assistance in strategy, implementation, or auditing existing work. +############################################################################### + +import json +import logging +import urllib.request +import time +import datetime +import random +from ga4mp.utils import params_dict +from ga4mp.event import Event +from ga4mp.store import BaseStore, DictStore + +import os, sys +sys.path.append( + os.path.normpath(os.path.join(os.path.dirname(__file__), "..")) +) + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + +class BaseGa4mp(object): + """ + Parent class that provides an interface for sending data to Google Analytics, supporting the GA4 Measurement Protocol. + + Parameters + ---------- + api_secret : string + Generated through the Google Analytics UI. 
To create a new secret, navigate in the Google Analytics UI to: Admin > Data Streams > + [choose your stream] > Measurement Protocol API Secrets > Create + + See Also + -------- + + * Measurement Protocol (Google Analytics 4): https://developers.google.com/analytics/devguides/collection/protocol/ga4 + + Examples + -------- + # Initialize tracking object for gtag usage + >>> ga = GtagMP(api_secret = "API_SECRET", measurement_id = "MEASUREMENT_ID", client_id="CLIENT_ID") + + # Initialize tracking object for Firebase usage + >>> ga = FirebaseMP(api_secret = "API_SECRET", firebase_app_id = "FIREBASE_APP_ID", app_instance_id="APP_INSTANCE_ID") + + # Build an event + >>> event_type = 'new_custom_event' + >>> event_parameters = {'parameter_key_1': 'parameter_1', 'parameter_key_2': 'parameter_2'} + >>> event = {'name': event_type, 'params': event_parameters } + >>> events = [event] + + # Send a custom event to GA4 immediately + >>> ga.send(events) + + # Postponed send of a custom event to GA4 + >>> ga.send(events, postpone=True) + >>> ga.postponed_send() + """ + + def __init__(self, api_secret, store: BaseStore = None): + self._initialization_time = time.time() # used for both session_id and calculating engagement time + self.api_secret = api_secret + self._event_list = [] + assert store is None or isinstance(store, BaseStore), "if supplied, store must be an instance of BaseStore" + self.store = store or DictStore() + self._check_store_requirements() + self._base_domain = "https://www.google-analytics.com/mp/collect" + self._validation_domain = "https://www.google-analytics.com/debug/mp/collect" + + def _check_store_requirements(self): + # Store must contain "session_id" and "last_interaction_time_msec" in order for tracking to work properly. 
+ if self.store.get_session_parameter("session_id") is None: + self.store.set_session_parameter(name="session_id", value=int(self._initialization_time)) + # Note: "last_interaction_time_msec" factors into the required "engagement_time_msec" event parameter. + self.store.set_session_parameter(name="last_interaction_time_msec", value=int(self._initialization_time * 1000)) + + def create_new_event(self, name): + return Event(name=name) + + def send(self, events, validation_hit=False, postpone=False, date=None): + """ + Method to send an http post request to google analytics with the specified events. + + Parameters + ---------- + events : List[Dict] + A list of dictionaries of the events to be sent to Google Analytics. The list of dictionaries should adhere + to the following format: + + [{'name': 'level_end', + 'params' : {'level_name': 'First', + 'success': 'True'} + }, + {'name': 'level_up', + 'params': {'character': 'John Madden', + 'level': 'First'} + }] + + validation_hit : bool, optional + Boolean to depict if events should be tested against the Measurement Protocol Validation Server, by default False + postpone : bool, optional + Boolean to depict if provided event list should be postponed, by default False + date : datetime + Python datetime object for sending a historical event at the given date. Date cannot be in the future. + """ + + # check for any missing or invalid parameters among automatically collected and recommended event types + self._check_params(events) + self._check_date_not_in_future(date) + self._add_session_id_and_engagement_time(events) + + if postpone is True: + # build event list to send later + for event in events: + event["_timestamp_micros"] = self._get_timestamp(time.time()) + self._event_list.append(event) + else: + # batch events into sets of 25 events, the maximum allowed. 
+ batched_event_list = [ + events[event : event + 25] for event in range(0, len(events), 25) + ] + # send http post request + self._http_post( + batched_event_list, validation_hit=validation_hit, date=date + ) + + def postponed_send(self): + """ + Method to send the events provided to Ga4mp.send(events,postpone=True) + """ + + for event in self._event_list: + self._http_post([event], postpone=True) + + # clear event_list for future use + self._event_list = [] + + def append_event_to_params_dict(self, new_name_and_parameters): + + """ + Method to append event name and parameters key-value pairing(s) to parameters dictionary. + + Parameters + ---------- + new_name_and_parameters : Dict + A dictionary with one key-value pair representing a new type of event to be sent to Google Analytics. + The dictionary should adhere to the following format: + + {'new_name': ['new_param_1', 'new_param_2', 'new_param_3']} + """ + + params_dict.update(new_name_and_parameters) + + def _http_post(self, batched_event_list, validation_hit=False, postpone=False, date=None): + """ + Method to send http POST request to google-analytics. + + Parameters + ---------- + batched_event_list : List[List[Dict]] + List of List of events. Places initial event payload into a list to send http POST in batches. + validation_hit : bool, optional + Boolean to depict if events should be tested against the Measurement Protocol Validation Server, by default False + postpone : bool, optional + Boolean to depict if provided event list should be postponed, by default False + date : datetime + Python datetime object for sending a historical event at the given date. Date cannot be in the future. + Timestamp micros supports up to 48 hours of backdating. + If date is specified, postpone must be False or an assertion will be thrown. 
+ """ + self._check_date_not_in_future(date) + status_code = None # Default set to know if batch loop does not work and to bound status_code + + # set domain + domain = self._base_domain + if validation_hit is True: + domain = self._validation_domain + logger.info(f"Sending POST to: {domain}") + + # loop through events in batches of 25 + batch_number = 1 + for batch in batched_event_list: + # url and request slightly differ by subclass + url = self._build_url(domain=domain) + request = self._build_request(batch=batch) + self._add_user_props_to_hit(request) + + # make adjustments for postponed hit + request["events"] = ( + {"name": batch["name"], "params": batch["params"]} + if (postpone) + else batch + ) + + if date is not None: + logger.info(f"Setting event timestamp to: {date}") + assert ( + postpone is False + ), "Cannot send postponed historical hit, ensure postpone=False" + + ts = self._datetime_to_timestamp(date) + ts_micro = self._get_timestamp(ts) + request["timestamp_micros"] = int(ts_micro) + logger.info(f"Timestamp of request is: {request['timestamp_micros']}") + + if postpone: + # add timestamp to hit + request["timestamp_micros"] = batch["_timestamp_micros"] + + req = urllib.request.Request(url) + req.add_header("Content-Type", "application/json; charset=utf-8") + jsondata = json.dumps(request) + json_data_as_bytes = jsondata.encode("utf-8") # needs to be bytes + req.add_header("Content-Length", len(json_data_as_bytes)) + result = urllib.request.urlopen(req, json_data_as_bytes) + + status_code = result.status + logger.info(f"Batch Number: {batch_number}") + logger.info(f"Status code: {status_code}") + batch_number += 1 + + return status_code + + def _check_params(self, events): + + """ + Method to check whether the provided event payload parameters align with supported parameters. + + Parameters + ---------- + events : List[Dict] + A list of dictionaries of the events to be sent to Google Analytics. 
The list of dictionaries should adhere + to the following format: + + [{'name': 'level_end', + 'params' : {'level_name': 'First', + 'success': 'True'} + }, + {'name': 'level_up', + 'params': {'character': 'John Madden', + 'level': 'First'} + }] + """ + + # check to make sure it's a list of dictionaries with the right keys + + assert type(events) == list, "events should be a list" + + for event in events: + + assert isinstance(event, dict), "each event should be an instance of a dictionary" + + assert "name" in event, 'each event should have a "name" key' + + assert "params" in event, 'each event should have a "params" key' + + # check for any missing or invalid parameters + + for e in events: + event_name = e["name"] + event_params = e["params"] + if event_name in params_dict.keys(): + for parameter in params_dict[event_name]: + if parameter not in event_params.keys(): + logger.warning( + f"WARNING: Event parameters do not match event type.\nFor {event_name} event type, the correct parameter(s) are {params_dict[event_name]}.\nThe parameter '{parameter}' triggered this warning.\nFor a breakdown of currently supported event types and their parameters go here: https://support.google.com/analytics/answer/9267735\n" + ) + + def _add_session_id_and_engagement_time(self, events): + """ + Method to add the session_id and engagement_time_msec parameter to all events. 
+ """ + for event in events: + current_time_in_milliseconds = int(time.time() * 1000) + + event_params = event["params"] + if "session_id" not in event_params.keys(): + event_params["session_id"] = self.store.get_session_parameter("session_id") + if "engagement_time_msec" not in event_params.keys(): + last_interaction_time = self.store.get_session_parameter("last_interaction_time_msec") + event_params["engagement_time_msec"] = current_time_in_milliseconds - last_interaction_time if current_time_in_milliseconds > last_interaction_time else 0 + self.store.set_session_parameter(name="last_interaction_time_msec", value=current_time_in_milliseconds) + + def _add_user_props_to_hit(self, hit): + + """ + Method is a helper function to add user properties to outgoing hits. + + Parameters + ---------- + hit : dict + """ + + for key in self.store.get_all_user_properties(): + try: + if key in ["user_id", "non_personalized_ads"]: + hit.update({key: self.store.get_user_property(key)}) + else: + if "user_properties" not in hit.keys(): + hit.update({"user_properties": {}}) + hit["user_properties"].update( + {key: {"value": self.store.get_user_property(key)}} + ) + except: + logger.info(f"Failed to add user property to outgoing hit: {key}") + + def _get_timestamp(self, timestamp): + """ + Method returns UNIX timestamp in microseconds for postponed hits. + + Parameters + ---------- + None + """ + return int(timestamp * 1e6) + + def _datetime_to_timestamp(self, dt): + """ + Private method to convert a datetime object into a timestamp + + Parameters + ---------- + dt : datetime + A datetime object in any format + + Returns + ------- + timestamp + A UNIX timestamp in milliseconds + """ + return time.mktime(dt.timetuple()) + + def _check_date_not_in_future(self, date): + """ + Method to check that provided date is not in the future. 
+ + Parameters + ---------- + date : datetime + Python datetime object + """ + if date is None: + pass + else: + assert ( + date <= datetime.datetime.now() + ), "Provided date cannot be in the future" + + def _build_url(self, domain): + raise NotImplementedError("Subclass should be using this function, but it was called through the base class instead.") + + def _build_request(self, batch): + raise NotImplementedError("Subclass should be using this function, but it was called through the base class instead.") + +class GtagMP(BaseGa4mp): + """ + Subclass for users of gtag. See `Ga4mp` parent class for examples. + + Parameters + ---------- + measurement_id : string + The identifier for a Data Stream. Found in the Google Analytics UI under: Admin > Data Streams > [choose your stream] > Measurement ID (top-right) + client_id : string + A unique identifier for a client, representing a specific browser/device. + """ + + def __init__(self, api_secret, measurement_id, client_id,): + super().__init__(api_secret) + self.measurement_id = measurement_id + self.client_id = client_id + + def _build_url(self, domain): + return f"{domain}?measurement_id={self.measurement_id}&api_secret={self.api_secret}" + + def _build_request(self, batch): + return {"client_id": self.client_id, "events": batch} + + def random_client_id(self): + """ + Utility function for generating a new client ID matching the typical format of 10 random digits and the UNIX timestamp in seconds, joined by a period. + """ + return "%0.10d" % random.randint(0,9999999999) + "." + str(int(time.time())) + +class FirebaseMP(BaseGa4mp): + """ + Subclass for users of Firebase. See `Ga4mp` parent class for examples. + + Parameters + ---------- + firebase_app_id : string + The identifier for a Firebase app. Found in the Firebase console under: Project Settings > General > Your Apps > App ID. + app_instance_id : string + A unique identifier for a Firebase app instance. 
+ * Android - getAppInstanceId() - https://firebase.google.com/docs/reference/android/com/google/firebase/analytics/FirebaseAnalytics#public-taskstring-getappinstanceid + * Kotlin - getAppInstanceId() - https://firebase.google.com/docs/reference/kotlin/com/google/firebase/analytics/FirebaseAnalytics#getappinstanceid + * Swift - appInstanceID() - https://firebase.google.com/docs/reference/swift/firebaseanalytics/api/reference/Classes/Analytics#appinstanceid + * Objective-C - appInstanceID - https://firebase.google.com/docs/reference/ios/firebaseanalytics/api/reference/Classes/FIRAnalytics#+appinstanceid + * C++ - GetAnalyticsInstanceId() - https://firebase.google.com/docs/reference/cpp/namespace/firebase/analytics#getanalyticsinstanceid + * Unity - GetAnalyticsInstanceIdAsync() - https://firebase.google.com/docs/reference/unity/class/firebase/analytics/firebase-analytics#getanalyticsinstanceidasync + """ + + def __init__(self, api_secret, firebase_app_id, app_instance_id): + super().__init__(api_secret) + self.firebase_app_id = firebase_app_id + self.app_instance_id = app_instance_id + + def _build_url(self, domain): + return f"{domain}?firebase_app_id={self.firebase_app_id}&api_secret={self.api_secret}" + + def _build_request(self, batch): + return {"app_instance_id": self.app_instance_id, "events": batch} \ No newline at end of file diff --git a/lib/ga4mp/item.py b/lib/ga4mp/item.py new file mode 100644 index 00000000..9c5ee9cd --- /dev/null +++ b/lib/ga4mp/item.py @@ -0,0 +1,11 @@ +class Item(dict): + def __init__(self, item_id=None, item_name=None): + if item_id is None and item_name is None: + raise ValueError("At least one of 'item_id' and 'item_name' is required.") + if item_id is not None: + self.set_parameter("item_id", str(item_id)) + if item_name is not None: + self.set_parameter("item_name", item_name) + + def set_parameter(self, name, value): + self[name] = value \ No newline at end of file diff --git a/lib/ga4mp/store.py b/lib/ga4mp/store.py new file 
mode 100644 index 00000000..d85bc0c2 --- /dev/null +++ b/lib/ga4mp/store.py @@ -0,0 +1,116 @@ +import json +import logging +from pathlib import Path + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + +class BaseStore(dict): + def __init__(self): + self.update([("user_properties", {}),("session_parameters", {})]) + + def save(self): + raise NotImplementedError("Subclass should be using this function, but it was called through the base class instead.") + + def _check_exists(self, key): + # Helper function to make sure a key exists before trying to work with values within it. + if key not in self.keys(): + self[key] = {} + + def _set(self, param_type, name, value): + # Helper function to set a single parameter (user or session or other). + self._check_exists(key=param_type) + self[param_type][name] = value + + def _get_one(self, param_type, name): + # Helper function to get a single parameter value (user or session). + self._check_exists(key=param_type) + return self[param_type].get(name, None) + + def _get_all(self, param_type=None): + # Helper function to get all user or session parameters - or the entire dictionary if not specified. + if param_type is not None: + return self[param_type] + else: + return self + + # While redundant, the following make sure the distinction between session and user items is easier for the end user. 
+ def set_user_property(self, name, value): + self._set(param_type="user_properties", name=name, value=value) + + def get_user_property(self, name): + return self._get_one(param_type="user_properties", name=name) + + def get_all_user_properties(self): + return self._get_all(param_type="user_properties") + + def clear_user_properties(self): + self["user_properties"] = {} + + def set_session_parameter(self, name, value): + self._set(param_type="session_parameters", name=name, value=value) + + def get_session_parameter(self, name): + return self._get_one(param_type="session_parameters", name=name) + + def get_all_session_parameters(self): + return self._get_all(param_type="session_parameters") + + def clear_session_parameters(self): + self["session_parameters"] = {} + + # Similar functions for other items the user wants to store that don't fit the other two categories. + def set_other_parameter(self, name, value): + self._set(param_type="other", name=name, value=value) + + def get_other_parameter(self, name): + return self._get_one(param_type="other", name=name) + + def get_all_other_parameters(self): + return self._get_all(param_type="other") + + def clear_other_parameters(self): + self["other"] = {} + +class DictStore(BaseStore): + # Class for working with dictionaries that persist for the life of the class. + def __init__(self, data: dict = None): + super().__init__() + if data: + self.update(data) + + def save(self): + # Give the user back what's in the dictionary so they can decide how to save it. + return self._get_all() + +class FileStore(BaseStore): + # Class for working with dictionaries that get saved to a JSON file. + def __init__(self, data_location: str = None): + super().__init__() + self.data_location = data_location + try: + self._load_file() + except: + logger.info(f"Failed to find file at location: {data_location}") + + def _load_file(self): + # Function to get data from the object's initialized location. 
+ # If the provided or stored data_location exists, read the file and overwrite the object's contents. + if Path(self.data_location).exists(): + with open(self.data_location, "r") as json_file: + self.update(json.load(json_file)) + # If the data_location doesn't exist, try to create a new starter JSON file at the location given. + else: + starter_dict = '{"user_properties":{}, "session_parameters":{}}' + starter_json = json.loads(starter_dict) + Path(self.data_location).touch() + with open(self.data_location, "w") as json_file: + json.dump(starter_json, json_file) + + def save(self): + # Function to save the current dictionary to a JSON file at the object's initialized location. + try: + with open(self.data_location, "w") as outfile: + json.dump(self, outfile) + except: + logger.info(f"Failed to save file at location: {self.data_location}") \ No newline at end of file diff --git a/lib/ga4mp/utils.py b/lib/ga4mp/utils.py new file mode 100644 index 00000000..27fbca86 --- /dev/null +++ b/lib/ga4mp/utils.py @@ -0,0 +1,392 @@ +# all automatically collected and recommended event types +params_dict = { + "ad_click": [ + "ad_event_id" + ], + "ad_exposure": [ + "firebase_screen", + "firebase_screen_id", + "firebase_screen_class", + "exposure_time", + ], + "ad_impression": [ + "ad_event_id" + ], + "ad_query": [ + "ad_event_id" + ], + "ad_reward": [ + "ad_unit_id", + "reward_type", + "reward_value" + ], + "add_payment_info": [ + "coupon", + "currency", + "items", + "payment_type", + "value" + ], + "add_shipping_info": [ + "coupon", + "currency", + "items", + "shipping_tier", + "value" + ], + "add_to_cart": [ + "currency", + "items", + "value" + ], + "add_to_wishlist": [ + "currency", + "items", + "value" + ], + "adunit_exposure": [ + "firebase_screen", + "firebase_screen_id", + "firebase_screen_class", + "exposure_time", + ], + "app_clear_data": [], + "app_exception": [ + "fatal", + "timestamp", + "engagement_time_msec" + ], + "app_remove": [], + "app_store_refund": [ + 
"product_id", + "value", + "currency", + "quantity" + ], + "app_store_subscription_cancel": [ + "product_id", + "price", + "value", + "currency", + "cancellation_reason", + ], + "app_store_subscription_convert": [ + "product_id", + "price", + "value", + "currency", + "quantity", + ], + "app_store_subscription_renew": [ + "product_id", + "price", + "value", + "currency", + "quantity", + "renewal_count", + ], + "app_update": [ + "previous_app_version" + ], + "begin_checkout": [ + "coupon", + "currency", + "items", + "value" + ], + "click": [], + "dynamic_link_app_open": [ + "source", + "medium", + "campaign", + "link_id", + "accept_time" + ], + "dynamic_link_app_update": [ + "source", + "medium", + "campaign", + "link_id", + "accept_time", + ], + "dynamic_link_first_open": [ + "source", + "medium", + "campaign", + "link_id", + "accept_time", + ], + "earn_virtual_currency": [ + "virtual_currency_name", + "value" + ], + "error": [ + "firebase_error", + "firebase_error_value" + ], + "file_download": [ + "file_extension", + "file_name", + "link_classes", + "link_domain", + "link_id", + "link_text", + "link_url", + ], + "firebase_campaign": [ + "source", + "medium", + "campaign", + "term", + "content", + "gclid", + "aclid", + "cp1", + "anid", + "click_timestamp", + "campaign_info_source", + ], + "firebase_in_app_message_action": [ + "message_name", + "message_device_time", + "message_id", + ], + "firebase_in_app_message_dismiss": [ + "message_name", + "message_device_time", + "message_id", + ], + "firebase_in_app_message_impression": [ + "message_name", + "message_device_time", + "message_id", + ], + "first_open": [ + "previous_gmp_app_id", + "updated_with_analytics", + "previous_first_open_count", + "system_app", + "system_app_update", + "deferred_analytics_collection", + "reset_analytics_cause", + "engagement_time_msec", + ], + "first_visit": [], + "generate_lead": [ + "value", + "currency" + ], + "in_app_purchase": [ + "product_id", + "price", + "value", + "currency", 
+ "quantity", + "subscription", + "free_trial", + "introductory_price", + ], + "join_group": [ + "group_id" + ], + "level_end": [ + "level_name", + "success" + ], + "level_start": [ + "level_name" + ], + "level_up": [ + "character", + "level" + ], + "login": [ + "method" + ], + "notification_dismiss": [ + "message_name", + "message_time", + "message_device_time", + "message_id", + "topic", + "label", + "message_channel", + ], + "notification_foreground": [ + "message_name", + "message_time", + "message_device_time", + "message_id", + "topic", + "label", + "message_channel", + "message_type", + ], + "notification_open": [ + "message_name", + "message_time", + "message_device_time", + "message_id", + "topic", + "label", + "message_channel", + ], + "notification_receive": [ + "message_name", + "message_time", + "message_device_time", + "message_id", + "topic", + "label", + "message_channel", + "message_type", + ], + "notification_send": [ + "message_name", + "message_time", + "message_device_time", + "message_id", + "topic", + "label", + "message_channel", + ], + "os_update": [ + "previous_os_version" + ], + "page_view": [ + "page_location", + "page_referrer" + ], + "post_score": [ + "level", + "character", + "score" + ], + "purchase": [ + "affiliation", + "coupon", + "currency", + "items", + "transaction_id", + "shipping", + "tax", + "value", + ], + "refund": [ + "transaction_id", + "value", + "currency", + "tax", + "shipping", + "items" + ], + "remove_from_cart": [ + "currency", + "items", + "value" + ], + "screen_view": [ + "firebase_screen", + "firebase_screen_class", + "firebase_screen_id", + "firebase_previous_screen", + "firebase_previous_class", + "firebase_previous_id", + "engagement_time_msec", + ], + "scroll": [], + "search": [ + "search_term" + ], + "select_content": [ + "content_type", + "item_id" + ], + "select_item": [ + "items", + "item_list_name", + "item_list_id" + ], + "select_promotion": [ + "items", + "promotion_id", + "promotion_name", + 
"creative_name", + "creative_slot", + "location_id", + ], + "session_start": [], + "share": [ + "content_type", + "item_id" + ], + "sign_up": [ + "method" + ], + "view_search_results": [ + "search_term" + ], + "spend_virtual_currency": [ + "item_name", + "virtual_currency_name", + "value" + ], + "tutorial_begin": [], + "tutorial_complete": [], + "unlock_achievement": [ + "achievement_id" + ], + "user_engagement": [ + "engagement_time_msec" + ], + "video_start": [ + "video_current_time", + "video_duration", + "video_percent", + "video_provider", + "video_title", + "video_url", + "visible", + ], + "video_progress": [ + "video_current_time", + "video_duration", + "video_percent", + "video_provider", + "video_title", + "video_url", + "visible", + ], + "video_complete": [ + "video_current_time", + "video_duration", + "video_percent", + "video_provider", + "video_title", + "video_url", + "visible", + ], + "view_cart": [ + "currency", + "items", + "value" + ], + "view_item": [ + "currency", + "items", + "value" + ], + "view_item_list": [ + "items", + "item_list_name", + "item_list_id" + ], + "view_promotion": [ + "items", + "promotion_id", + "promotion_name", + "creative_name", + "creative_slot", + "location_id", + ], +} \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index b69f5fc2..6d3fa686 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,6 +14,7 @@ distro==1.8.0 dnspython==2.2.1 facebook-sdk==3.1.0 future==0.18.2 +ga4mp==2.0.4 gntp==1.0.3 html5lib==1.1 httpagentparser==1.9.5 From af3e5574f5442fb45a722462ba62965fe2dce247 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 26 Feb 2023 15:09:51 -0800 Subject: [PATCH 034/113] Migrate to Google Analytics 4 --- plexpy/__init__.py | 66 ++++++++++++++++++++++++---------------------- plexpy/plextv.py | 8 ++++++ 2 files changed, 42 insertions(+), 32 deletions(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index d0aed7cf..31e3d51c 
100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -36,7 +36,7 @@ except ImportError: from apscheduler.schedulers.background import BackgroundScheduler from apscheduler.triggers.interval import IntervalTrigger -from UniversalAnalytics import Tracker +from ga4mp import GtagMP import pytz PYTHON2 = sys.version_info[0] == 2 @@ -578,12 +578,12 @@ def start(): # Send system analytics events if not CONFIG.FIRST_RUN_COMPLETE: - analytics_event(category='system', action='install') + analytics_event(name='install') elif _UPDATE: - analytics_event(category='system', action='update') + analytics_event(name='update') - analytics_event(category='system', action='start') + analytics_event(name='start') _STARTED = True @@ -2843,44 +2843,46 @@ def generate_uuid(): def initialize_tracker(): - data = { - 'dataSource': 'server', - 'appName': common.PRODUCT, - 'appVersion': common.RELEASE, - 'appId': INSTALL_TYPE, - 'appInstallerId': CONFIG.GIT_BRANCH, - 'dimension1': '{} {}'.format(common.PLATFORM, common.PLATFORM_RELEASE), # App Platform - 'dimension2': common.PLATFORM_LINUX_DISTRO, # Linux Distro - 'dimension3': common.PYTHON_VERSION, - 'userLanguage': SYS_LANGUAGE, - 'documentEncoding': SYS_ENCODING, - 'noninteractive': True - } - - tracker = Tracker.create('UA-111522699-2', client_id=CONFIG.PMS_UUID, hash_client_id=True, - user_agent=common.USER_AGENT) - tracker.set(data) - + tracker = GtagMP( + api_secret='Cl_LjAKUT26AS22YZwqaPw', + measurement_id='G-NH1M4BYM2P', + client_id=CONFIG.PMS_UUID + ) return tracker -def analytics_event(category, action, label=None, value=None, **kwargs): - data = {'category': category, 'action': action} +def analytics_event(name, **kwargs): + event = TRACKER.create_new_event(name=name) + event.set_event_param('name', common.PRODUCT) + event.set_event_param('version', common.RELEASE) + event.set_event_param('install', INSTALL_TYPE) + event.set_event_param('branch', CONFIG.GIT_BRANCH) + event.set_event_param('platform', common.PLATFORM) + 
event.set_event_param('platformRelease', common.PLATFORM_RELEASE) + event.set_event_param('platformVersion', common.PLATFORM_VERSION) + event.set_event_param('linuxDistro', common.PLATFORM_LINUX_DISTRO) + event.set_event_param('pythonVersion', common.PYTHON_VERSION) + event.set_event_param('language', SYS_LANGUAGE) + event.set_event_param('encoding', SYS_ENCODING) + event.set_event_param('timezone', str(SYS_TIMEZONE)) + event.set_event_param('timezoneUTCOffset', f'UTC{SYS_UTC_OFFSET}') - if label is not None: - data['label'] = label + for key, value in kwargs.items(): + event.set_event_param(key, value) - if value is not None: - data['value'] = value + plex_tv = plextv.PlexTV() + ip_address = plex_tv.get_public_ip(output_format='text') + geolocation = plex_tv.get_geoip_lookup(ip_address) or {} - if kwargs: - data.update(kwargs) + event.set_event_param('country', geolocation.get('country', 'Unknown')) + event.set_event_param('countryCode', geolocation.get('code', 'Unknown')) if TRACKER: try: - TRACKER.send('event', data) + TRACKER.send(events=[event]) + pass except Exception as e: - logger.warn("Failed to send analytics event for category '%s', action '%s': %s" % (category, action, e)) + logger.warn("Failed to send analytics event for name '%s': %s" % (name, e)) def check_folder_writable(folder, fallback, name): diff --git a/plexpy/plextv.py b/plexpy/plextv.py index 1b596fc7..e7815756 100644 --- a/plexpy/plextv.py +++ b/plexpy/plextv.py @@ -331,6 +331,14 @@ class PlexTV(object): return request + def get_public_ip(self, output_format=''): + uri = '/:/ip' + request = self.request_handler.make_request(uri=uri, + request_type='GET', + output_format=output_format) + + return request + def get_plextv_geoip(self, ip_address='', output_format=''): uri = '/api/v2/geoip?ip_address=%s' % ip_address request = self.request_handler.make_request(uri=uri, From 731d5c9bafab5ad505d8f30afd8a5e3e1128803b Mon Sep 17 00:00:00 2001 From: JonnyWong16 
<9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 26 Feb 2023 15:17:04 -0800 Subject: [PATCH 035/113] Add multiprocessing semaphore prefix * Fixes #2007 --- Tautulli.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Tautulli.py b/Tautulli.py index eebfa55a..2c03eb3b 100755 --- a/Tautulli.py +++ b/Tautulli.py @@ -29,6 +29,7 @@ import appdirs import argparse import datetime import locale +import multiprocessing import pytz import signal import shutil @@ -47,6 +48,8 @@ elif common.PLATFORM == 'Darwin': signal.signal(signal.SIGINT, plexpy.sig_handler) signal.signal(signal.SIGTERM, plexpy.sig_handler) +multiprocessing.current_process()._config['semprefix'] = '/tautulli.tautulli.mp' + def main(): """ From cd3ff6eed76f0fb33a2e7b9a1ac754548d48e0b9 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 26 Feb 2023 16:06:44 -0800 Subject: [PATCH 036/113] Remove extra pass --- plexpy/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 31e3d51c..993436b8 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -2880,7 +2880,6 @@ def analytics_event(name, **kwargs): if TRACKER: try: TRACKER.send(events=[event]) - pass except Exception as e: logger.warn("Failed to send analytics event for name '%s': %s" % (name, e)) From ce45321e3fd3d188ad29bebd277d9beff3a04278 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 26 Feb 2023 15:17:04 -0800 Subject: [PATCH 037/113] Revert "Add multiprocessing semaphore prefix" This reverts commit 731d5c9bafab5ad505d8f30afd8a5e3e1128803b. 
--- Tautulli.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/Tautulli.py b/Tautulli.py index 2c03eb3b..eebfa55a 100755 --- a/Tautulli.py +++ b/Tautulli.py @@ -29,7 +29,6 @@ import appdirs import argparse import datetime import locale -import multiprocessing import pytz import signal import shutil @@ -48,8 +47,6 @@ elif common.PLATFORM == 'Darwin': signal.signal(signal.SIGINT, plexpy.sig_handler) signal.signal(signal.SIGTERM, plexpy.sig_handler) -multiprocessing.current_process()._config['semprefix'] = '/tautulli.tautulli.mp' - def main(): """ From 993909fa089dcbb21b7d80c46d9f784ad6aeefe9 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Sun, 26 Feb 2023 16:17:58 -0800 Subject: [PATCH 038/113] Use private shared-memory for Snap --- snap/snapcraft.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml index 1220971e..030e72a9 100644 --- a/snap/snapcraft.yaml +++ b/snap/snapcraft.yaml @@ -15,6 +15,10 @@ architectures: - build-on: arm64 - build-on: armhf +plugs: + shared-memory: + private: true + parts: tautulli: plugin: dump From ae3d75bbe309b835f819d61fadfa46deb7c813e1 Mon Sep 17 00:00:00 2001 From: herby2212 <12448284+herby2212@users.noreply.github.com> Date: Mon, 27 Feb 2023 02:32:50 +0100 Subject: [PATCH 039/113] watch time & user stats for collections (#1982) * user_stats for collection * watch_time_stats for collection * check for media_type to be compatible with API * update API and datafactory optimizations * beautify webserve class * fix sql query build * filter on suitable collections * stats for collections of sub media type * optimize array creation --- data/interfaces/default/info.html | 15 ++++++-- plexpy/datafactory.py | 61 ++++++++++++++++++++++--------- plexpy/webserve.py | 22 +++++++---- 3 files changed, 69 insertions(+), 29 deletions(-) diff --git a/data/interfaces/default/info.html b/data/interfaces/default/info.html index dbce734b..471c8e7e 100644 
--- a/data/interfaces/default/info.html +++ b/data/interfaces/default/info.html @@ -12,6 +12,7 @@ data :: Usable parameters (if not applicable for media type, blank value will be == Global keys == rating_key Returns the unique identifier for the media item. media_type Returns the type of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'. +sub_media_type Returns the subtype of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'. art Returns the location of the item's artwork title Returns the name of the movie, show, episode, artist, album, or track. edition_title Returns the edition title of a movie. @@ -553,7 +554,7 @@ DOCUMENTATION :: END
% endif - % if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track'): + % if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track', 'collection'):
@@ -936,13 +937,16 @@ DOCUMENTATION :: END }); % endif -% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track'): +% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track', 'collection'): % endif -% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track', 'collection'): +% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track', 'collection', 'playlist'): -% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track'): +% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track', 'collection', 'playlist'): @@ -373,14 +373,35 @@ type: 'get', dataType: "json", success: function (data) { - var select = $('#graph-user'); + let select = $('#graph-user'); + let by_id = {}; data.sort(function(a, b) { return a.friendly_name.localeCompare(b.friendly_name); }); data.forEach(function(item) { select.append(''); + by_id[item.user_id] = item.friendly_name; }); + select.selectpicker({ + countSelectedText: function(sel, total) { + if (sel === 0 || sel === total) { + return 'All users'; + } else if (sel > 1) { + return sel + ' users'; + } else { + return select.val().map(function(id) { + return by_id[id]; + }).join(', '); + } + }, + style: 'btn-dark', + actionsBox: true, + selectedTextFormat: 'count', + noneSelectedText: 'All users' + }); + select.selectpicker('render'); + select.selectpicker('selectAll'); } }); @@ -602,11 +623,6 @@ $('#nav-tabs-total').tab('show'); } - // Set initial state - if (current_tab === '#tabs-plays') { loadGraphsTab1(current_day_range, yaxis); } - if (current_tab === '#tabs-stream') { loadGraphsTab2(current_day_range, yaxis); } - if (current_tab === '#tabs-total') { loadGraphsTab3(current_month_range, yaxis); } - // Tab1 opened $('#nav-tabs-plays').on('shown.bs.tab', function (e) { e.preventDefault(); @@ -652,9 +668,20 @@ 
$('.months').text(current_month_range); }); + let graph_user_last_id = undefined; + // User changed $('#graph-user').on('change', function() { - selected_user_id = $(this).val() || null; + let val = $(this).val(); + if (val.length === 0 || val.length === $(this).children().length) { + selected_user_id = null; // if all users are selected, just send an empty list + } else { + selected_user_id = val.join(","); + } + if (selected_user_id === graph_user_last_id) { + return; + } + graph_user_last_id = selected_user_id; if (current_tab === '#tabs-plays') { loadGraphsTab1(current_day_range, yaxis); } if (current_tab === '#tabs-stream') { loadGraphsTab2(current_day_range, yaxis); } if (current_tab === '#tabs-total') { loadGraphsTab3(current_month_range, yaxis); } diff --git a/data/interfaces/default/history.html b/data/interfaces/default/history.html index 327b99b7..8ab8b19e 100644 --- a/data/interfaces/default/history.html +++ b/data/interfaces/default/history.html @@ -1,6 +1,7 @@ <%inherit file="base.html"/> <%def name="headIncludes()"> + @@ -31,9 +32,7 @@ % if _session['user_group'] == 'admin':
@@ -121,6 +120,7 @@ <%def name="javascriptIncludes()"> + @@ -134,17 +134,40 @@ type: 'GET', dataType: 'json', success: function (data) { - var select = $('#history-user'); + let select = $('#history-user'); + let by_id = {}; data.sort(function (a, b) { return a.friendly_name.localeCompare(b.friendly_name); }); data.forEach(function (item) { select.append(''); + by_id[item.user_id] = item.friendly_name; }); + select.selectpicker({ + countSelectedText: function(sel, total) { + if (sel === 0 || sel === total) { + return 'All users'; + } else if (sel > 1) { + return sel + ' users'; + } else { + return select.val().map(function(id) { + return by_id[id]; + }).join(', '); + } + }, + style: 'btn-dark', + actionsBox: true, + selectedTextFormat: 'count', + noneSelectedText: 'All users' + }); + select.selectpicker('render'); + select.selectpicker('selectAll'); } }); + let history_user_last_id = undefined; + function loadHistoryTable(media_type, transcode_decision, selected_user_id) { history_table_options.ajax = { url: 'get_history', @@ -187,7 +210,16 @@ }); $('#history-user').on('change', function () { - selected_user_id = $(this).val() || null; + let val = $(this).val(); + if (val.length === 0 || val.length === $(this).children().length) { + selected_user_id = null; // if all users are selected, just send an empty list + } else { + selected_user_id = val.join(","); + } + if (selected_user_id === history_user_last_id) { + return; + } + history_user_last_id = selected_user_id; history_table.draw(); }); } diff --git a/data/interfaces/default/js/bootstrap-select.min.js b/data/interfaces/default/js/bootstrap-select.min.js new file mode 100644 index 00000000..92e3a32e --- /dev/null +++ b/data/interfaces/default/js/bootstrap-select.min.js @@ -0,0 +1,9 @@ +/*! 
+ * Bootstrap-select v1.13.14 (https://developer.snapappointments.com/bootstrap-select) + * + * Copyright 2012-2020 SnapAppointments, LLC + * Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE) + */ + +!function(e,t){void 0===e&&void 0!==window&&(e=window),"function"==typeof define&&define.amd?define(["jquery"],function(e){return t(e)}):"object"==typeof module&&module.exports?module.exports=t(require("jquery")):t(e.jQuery)}(this,function(e){!function(z){"use strict";var d=["sanitize","whiteList","sanitizeFn"],r=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],e={"*":["class","dir","id","lang","role","tabindex","style",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},l=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,a=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;function v(e,t){var i=e.nodeName.toLowerCase();if(-1!==z.inArray(i,t))return-1===z.inArray(i,r)||Boolean(e.nodeValue.match(l)||e.nodeValue.match(a));for(var s=z(t).filter(function(e,t){return t instanceof RegExp}),n=0,o=s.length;n]+>/g,"")),s&&(a=w(a)),a=a.toUpperCase(),o="contains"===i?0<=a.indexOf(t):a.startsWith(t)))break}return o}function L(e){return parseInt(e,10)||0}z.fn.triggerNative=function(e){var t,i=this[0];i.dispatchEvent?(u?t=new Event(e,{bubbles:!0}):(t=document.createEvent("Event")).initEvent(e,!0,!1),i.dispatchEvent(t)):i.fireEvent?((t=document.createEventObject()).eventType=e,i.fireEvent("on"+e,t)):this.trigger(e)};var 
f={"\xc0":"A","\xc1":"A","\xc2":"A","\xc3":"A","\xc4":"A","\xc5":"A","\xe0":"a","\xe1":"a","\xe2":"a","\xe3":"a","\xe4":"a","\xe5":"a","\xc7":"C","\xe7":"c","\xd0":"D","\xf0":"d","\xc8":"E","\xc9":"E","\xca":"E","\xcb":"E","\xe8":"e","\xe9":"e","\xea":"e","\xeb":"e","\xcc":"I","\xcd":"I","\xce":"I","\xcf":"I","\xec":"i","\xed":"i","\xee":"i","\xef":"i","\xd1":"N","\xf1":"n","\xd2":"O","\xd3":"O","\xd4":"O","\xd5":"O","\xd6":"O","\xd8":"O","\xf2":"o","\xf3":"o","\xf4":"o","\xf5":"o","\xf6":"o","\xf8":"o","\xd9":"U","\xda":"U","\xdb":"U","\xdc":"U","\xf9":"u","\xfa":"u","\xfb":"u","\xfc":"u","\xdd":"Y","\xfd":"y","\xff":"y","\xc6":"Ae","\xe6":"ae","\xde":"Th","\xfe":"th","\xdf":"ss","\u0100":"A","\u0102":"A","\u0104":"A","\u0101":"a","\u0103":"a","\u0105":"a","\u0106":"C","\u0108":"C","\u010a":"C","\u010c":"C","\u0107":"c","\u0109":"c","\u010b":"c","\u010d":"c","\u010e":"D","\u0110":"D","\u010f":"d","\u0111":"d","\u0112":"E","\u0114":"E","\u0116":"E","\u0118":"E","\u011a":"E","\u0113":"e","\u0115":"e","\u0117":"e","\u0119":"e","\u011b":"e","\u011c":"G","\u011e":"G","\u0120":"G","\u0122":"G","\u011d":"g","\u011f":"g","\u0121":"g","\u0123":"g","\u0124":"H","\u0126":"H","\u0125":"h","\u0127":"h","\u0128":"I","\u012a":"I","\u012c":"I","\u012e":"I","\u0130":"I","\u0129":"i","\u012b":"i","\u012d":"i","\u012f":"i","\u0131":"i","\u0134":"J","\u0135":"j","\u0136":"K","\u0137":"k","\u0138":"k","\u0139":"L","\u013b":"L","\u013d":"L","\u013f":"L","\u0141":"L","\u013a":"l","\u013c":"l","\u013e":"l","\u0140":"l","\u0142":"l","\u0143":"N","\u0145":"N","\u0147":"N","\u014a":"N","\u0144":"n","\u0146":"n","\u0148":"n","\u014b":"n","\u014c":"O","\u014e":"O","\u0150":"O","\u014d":"o","\u014f":"o","\u0151":"o","\u0154":"R","\u0156":"R","\u0158":"R","\u0155":"r","\u0157":"r","\u0159":"r","\u015a":"S","\u015c":"S","\u015e":"S","\u0160":"S","\u015b":"s","\u015d":"s","\u015f":"s","\u0161":"s","\u0162":"T","\u0164":"T","\u0166":"T","\u0163":"t","\u0165":"t","\u0167":"t","\u0168":"U","\u016a":"
U","\u016c":"U","\u016e":"U","\u0170":"U","\u0172":"U","\u0169":"u","\u016b":"u","\u016d":"u","\u016f":"u","\u0171":"u","\u0173":"u","\u0174":"W","\u0175":"w","\u0176":"Y","\u0177":"y","\u0178":"Y","\u0179":"Z","\u017b":"Z","\u017d":"Z","\u017a":"z","\u017c":"z","\u017e":"z","\u0132":"IJ","\u0133":"ij","\u0152":"Oe","\u0153":"oe","\u0149":"'n","\u017f":"s"},m=/[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g,g=RegExp("[\\u0300-\\u036f\\ufe20-\\ufe2f\\u20d0-\\u20ff\\u1ab0-\\u1aff\\u1dc0-\\u1dff]","g");function b(e){return f[e]}function w(e){return(e=e.toString())&&e.replace(m,b).replace(g,"")}var I,x,y,$,S=(I={"&":"&","<":"<",">":">",'"':""","'":"'","`":"`"},x="(?:"+Object.keys(I).join("|")+")",y=RegExp(x),$=RegExp(x,"g"),function(e){return e=null==e?"":""+e,y.test(e)?e.replace($,E):e});function E(e){return I[e]}var C={32:" ",48:"0",49:"1",50:"2",51:"3",52:"4",53:"5",54:"6",55:"7",56:"8",57:"9",59:";",65:"A",66:"B",67:"C",68:"D",69:"E",70:"F",71:"G",72:"H",73:"I",74:"J",75:"K",76:"L",77:"M",78:"N",79:"O",80:"P",81:"Q",82:"R",83:"S",84:"T",85:"U",86:"V",87:"W",88:"X",89:"Y",90:"Z",96:"0",97:"1",98:"2",99:"3",100:"4",101:"5",102:"6",103:"7",104:"8",105:"9"},N=27,D=13,H=32,W=9,B=38,M=40,R={success:!1,major:"3"};try{R.full=(z.fn.dropdown.Constructor.VERSION||"").split(" ")[0].split("."),R.major=R.full[0],R.success=!0}catch(e){}var 
U=0,j=".bs.select",V={DISABLED:"disabled",DIVIDER:"divider",SHOW:"open",DROPUP:"dropup",MENU:"dropdown-menu",MENURIGHT:"dropdown-menu-right",MENULEFT:"dropdown-menu-left",BUTTONCLASS:"btn-default",POPOVERHEADER:"popover-title",ICONBASE:"glyphicon",TICKICON:"glyphicon-ok"},F={MENU:"."+V.MENU},_={span:document.createElement("span"),i:document.createElement("i"),subtext:document.createElement("small"),a:document.createElement("a"),li:document.createElement("li"),whitespace:document.createTextNode("\xa0"),fragment:document.createDocumentFragment()};_.a.setAttribute("role","option"),"4"===R.major&&(_.a.className="dropdown-item"),_.subtext.className="text-muted",_.text=_.span.cloneNode(!1),_.text.className="text",_.checkMark=_.span.cloneNode(!1);var G=new RegExp(B+"|"+M),q=new RegExp("^"+W+"$|"+N),K={li:function(e,t,i){var s=_.li.cloneNode(!1);return e&&(1===e.nodeType||11===e.nodeType?s.appendChild(e):s.innerHTML=e),void 0!==t&&""!==t&&(s.className=t),null!=i&&s.classList.add("optgroup-"+i),s},a:function(e,t,i){var s=_.a.cloneNode(!0);return e&&(11===e.nodeType?s.appendChild(e):s.insertAdjacentHTML("beforeend",e)),void 0!==t&&""!==t&&s.classList.add.apply(s.classList,t.split(" ")),i&&s.setAttribute("style",i),s},text:function(e,t){var i,s,n=_.text.cloneNode(!1);if(e.content)n.innerHTML=e.content;else{if(n.textContent=e.text,e.icon){var o=_.whitespace.cloneNode(!1);(s=(!0===t?_.i:_.span).cloneNode(!1)).className=this.options.iconBase+" "+e.icon,_.fragment.appendChild(s),_.fragment.appendChild(o)}e.subtext&&((i=_.subtext.cloneNode(!1)).textContent=e.subtext,n.appendChild(i))}if(!0===t)for(;0'},maxOptions:!1,mobile:!1,selectOnTab:!1,dropdownAlignRight:!1,windowPadding:0,virtualScroll:600,display:!1,sanitize:!0,sanitizeFn:null,whiteList:e},Y.prototype={constructor:Y,init:function(){var 
i=this,e=this.$element.attr("id");U++,this.selectId="bs-select-"+U,this.$element[0].classList.add("bs-select-hidden"),this.multiple=this.$element.prop("multiple"),this.autofocus=this.$element.prop("autofocus"),this.$element[0].classList.contains("show-tick")&&(this.options.showTick=!0),this.$newElement=this.createDropdown(),this.buildData(),this.$element.after(this.$newElement).prependTo(this.$newElement),this.$button=this.$newElement.children("button"),this.$menu=this.$newElement.children(F.MENU),this.$menuInner=this.$menu.children(".inner"),this.$searchbox=this.$menu.find("input"),this.$element[0].classList.remove("bs-select-hidden"),!0===this.options.dropdownAlignRight&&this.$menu[0].classList.add(V.MENURIGHT),void 0!==e&&this.$button.attr("data-id",e),this.checkDisabled(),this.clickListener(),this.options.liveSearch?(this.liveSearchListener(),this.focusedParent=this.$searchbox[0]):this.focusedParent=this.$menuInner[0],this.setStyle(),this.render(),this.setWidth(),this.options.container?this.selectPosition():this.$element.on("hide"+j,function(){if(i.isVirtual()){var 
e=i.$menuInner[0],t=e.firstChild.cloneNode(!1);e.replaceChild(t,e.firstChild),e.scrollTop=0}}),this.$menu.data("this",this),this.$newElement.data("this",this),this.options.mobile&&this.mobile(),this.$newElement.on({"hide.bs.dropdown":function(e){i.$element.trigger("hide"+j,e)},"hidden.bs.dropdown":function(e){i.$element.trigger("hidden"+j,e)},"show.bs.dropdown":function(e){i.$element.trigger("show"+j,e)},"shown.bs.dropdown":function(e){i.$element.trigger("shown"+j,e)}}),i.$element[0].hasAttribute("required")&&this.$element.on("invalid"+j,function(){i.$button[0].classList.add("bs-invalid"),i.$element.on("shown"+j+".invalid",function(){i.$element.val(i.$element.val()).off("shown"+j+".invalid")}).on("rendered"+j,function(){this.validity.valid&&i.$button[0].classList.remove("bs-invalid"),i.$element.off("rendered"+j)}),i.$button.on("blur"+j,function(){i.$element.trigger("focus").trigger("blur"),i.$button.off("blur"+j)})}),setTimeout(function(){i.buildList(),i.$element.trigger("loaded"+j)})},createDropdown:function(){var e=this.multiple||this.options.showTick?" show-tick":"",t=this.multiple?' aria-multiselectable="true"':"",i="",s=this.autofocus?" autofocus":"";R.major<4&&this.$element.parent().hasClass("input-group")&&(i=" input-group-btn");var n,o="",r="",l="",a="";return this.options.header&&(o='
'+this.options.header+"
"),this.options.liveSearch&&(r=''),this.multiple&&this.options.actionsBox&&(l='
"),this.multiple&&this.options.doneButton&&(a='
"),n='",z(n)},setPositionData:function(){this.selectpicker.view.canHighlight=[];for(var e=this.selectpicker.view.size=0;e=this.options.virtualScroll||!0===this.options.virtualScroll},createView:function(A,e,t){var L,N,D=this,i=0,H=[];if(this.selectpicker.isSearching=A,this.selectpicker.current=A?this.selectpicker.search:this.selectpicker.main,this.setPositionData(),e)if(t)i=this.$menuInner[0].scrollTop;else if(!D.multiple){var s=D.$element[0],n=(s.options[s.selectedIndex]||{}).liIndex;if("number"==typeof n&&!1!==D.options.size){var o=D.selectpicker.main.data[n],r=o&&o.position;r&&(i=r-(D.sizeInfo.menuInnerHeight+D.sizeInfo.liHeight)/2)}}function l(e,t){var i,s,n,o,r,l,a,c,d=D.selectpicker.current.elements.length,h=[],p=!0,u=D.isVirtual();D.selectpicker.view.scrollTop=e,i=Math.ceil(D.sizeInfo.menuInnerHeight/D.sizeInfo.liHeight*1.5),s=Math.round(d/i)||1;for(var f=0;fd-1?0:D.selectpicker.current.data[d-1].position-D.selectpicker.current.data[D.selectpicker.view.position1-1].position,b.firstChild.style.marginTop=v+"px",b.firstChild.style.marginBottom=g+"px"):(b.firstChild.style.marginTop=0,b.firstChild.style.marginBottom=0),b.firstChild.appendChild(w),!0===u&&D.sizeInfo.hasScrollBar){var C=b.firstChild.offsetWidth;if(t&&CD.sizeInfo.selectWidth)b.firstChild.style.minWidth=D.sizeInfo.menuInnerInnerWidth+"px";else if(C>D.sizeInfo.menuInnerInnerWidth){D.$menu[0].style.minWidth=0;var O=b.firstChild.offsetWidth;O>D.sizeInfo.menuInnerInnerWidth&&(D.sizeInfo.menuInnerInnerWidth=O,b.firstChild.style.minWidth=D.sizeInfo.menuInnerInnerWidth+"px"),D.$menu[0].style.minWidth=""}}}if(D.prevActiveIndex=D.activeIndex,D.options.liveSearch){if(A&&t){var z,T=0;D.selectpicker.view.canHighlight[T]||(T=1+D.selectpicker.view.canHighlight.slice(1).indexOf(!0)),z=D.selectpicker.view.visibleElements[T],D.defocusItem(D.selectpicker.view.currentActive),D.activeIndex=(D.selectpicker.current.data[T]||{}).index,D.focusItem(z)}}else 
D.$menuInner.trigger("focus")}l(i,!0),this.$menuInner.off("scroll.createView").on("scroll.createView",function(e,t){D.noScroll||l(this.scrollTop,t),D.noScroll=!1}),z(window).off("resize"+j+"."+this.selectId+".createView").on("resize"+j+"."+this.selectId+".createView",function(){D.$newElement.hasClass(V.SHOW)&&l(D.$menuInner[0].scrollTop)})},focusItem:function(e,t,i){if(e){t=t||this.selectpicker.main.data[this.activeIndex];var s=e.firstChild;s&&(s.setAttribute("aria-setsize",this.selectpicker.view.size),s.setAttribute("aria-posinset",t.posinset),!0!==i&&(this.focusedParent.setAttribute("aria-activedescendant",s.id),e.classList.add("active"),s.classList.add("active")))}},defocusItem:function(e){e&&(e.classList.remove("active"),e.firstChild&&e.firstChild.classList.remove("active"))},setPlaceholder:function(){var e=!1;if(this.options.title&&!this.multiple){this.selectpicker.view.titleOption||(this.selectpicker.view.titleOption=document.createElement("option")),e=!0;var t=this.$element[0],i=!1,s=!this.selectpicker.view.titleOption.parentNode;if(s)this.selectpicker.view.titleOption.className="bs-title-option",this.selectpicker.view.titleOption.value="",i=void 0===z(t.options[t.selectedIndex]).attr("selected")&&void 0===this.$element.data("selected");!s&&0===this.selectpicker.view.titleOption.index||t.insertBefore(this.selectpicker.view.titleOption,t.firstChild),i&&(t.selectedIndex=0)}return e},buildData:function(){var p=':not([hidden]):not([data-hidden="true"])',u=[],f=0,e=this.setPlaceholder()?1:0;this.options.hideDisabled&&(p+=":not(:disabled)");var t=this.$element[0].querySelectorAll("select > *"+p);function m(e){var t=u[u.length-1];t&&"divider"===t.type&&(t.optID||e.optID)||((e=e||{}).type="divider",u.push(e))}function v(e,t){if((t=t||{}).divider="true"===e.getAttribute("data-divider"),t.divider)m({optID:t.optID});else{var i=u.length,s=e.style.cssText,n=s?S(s):"",o=(e.className||"")+(t.optgroupClass||"");t.optID&&(o="opt 
"+o),t.optionClass=o.trim(),t.inlineStyle=n,t.text=e.textContent,t.content=e.getAttribute("data-content"),t.tokens=e.getAttribute("data-tokens"),t.subtext=e.getAttribute("data-subtext"),t.icon=e.getAttribute("data-icon"),e.liIndex=i,t.display=t.content||t.text,t.type="option",t.index=i,t.option=e,t.selected=!!e.selected,t.disabled=t.disabled||!!e.disabled,u.push(t)}}function i(e,t){var i=t[e],s=t[e-1],n=t[e+1],o=i.querySelectorAll("option"+p);if(o.length){var r,l,a={display:S(i.label),subtext:i.getAttribute("data-subtext"),icon:i.getAttribute("data-icon"),type:"optgroup-label",optgroupClass:" "+(i.className||"")};f++,s&&m({optID:f}),a.optID=f,u.push(a);for(var c=0,d=o.length;c li")},render:function(){var e,t=this,i=this.$element[0],s=this.setPlaceholder()&&0===i.selectedIndex,n=O(i,this.options.hideDisabled),o=n.length,r=this.$button[0],l=r.querySelector(".filter-option-inner-inner"),a=document.createTextNode(this.options.multipleSeparator),c=_.fragment.cloneNode(!1),d=!1;if(r.classList.toggle("bs-placeholder",t.multiple?!o:!T(i,n)),this.tabIndex(),"static"===this.options.selectedTextFormat)c=K.text.call(this,{text:this.options.title},!0);else if(!1===(this.multiple&&-1!==this.options.selectedTextFormat.indexOf("count")&&1")).length&&o>e[1]||1===e.length&&2<=o))){if(!s){for(var h=0;h option"+m+", optgroup"+m+" option"+m).length,g="function"==typeof this.options.countSelectedText?this.options.countSelectedText(o,v):this.options.countSelectedText;c=K.text.call(this,{text:g.replace("{0}",o.toString()).replace("{1}",v.toString())},!0)}if(null==this.options.title&&(this.options.title=this.$element.attr("title")),c.childNodes.length||(c=K.text.call(this,{text:void 0!==this.options.title?this.options.title:this.options.noneSelectedText},!0)),r.title=c.textContent.replace(/<[^>]*>?/g,"").trim(),this.options.sanitize&&d&&P([c],t.options.whiteList,t.options.sanitizeFn),l.innerHTML="",l.appendChild(c),R.major<4&&this.$newElement[0].classList.contains("bs3-has-addon")){var 
b=r.querySelector(".filter-expand"),w=l.cloneNode(!0);w.className="filter-expand",b?r.replaceChild(w,b):r.appendChild(w)}this.$element.trigger("rendered"+j)},setStyle:function(e,t){var i,s=this.$button[0],n=this.$newElement[0],o=this.options.style.trim();this.$element.attr("class")&&this.$newElement.addClass(this.$element.attr("class").replace(/selectpicker|mobile-device|bs-select-hidden|validate\[.*\]/gi,"")),R.major<4&&(n.classList.add("bs3"),n.parentNode.classList.contains("input-group")&&(n.previousElementSibling||n.nextElementSibling)&&(n.previousElementSibling||n.nextElementSibling).classList.contains("input-group-addon")&&n.classList.add("bs3-has-addon")),i=e?e.trim():o,"add"==t?i&&s.classList.add.apply(s.classList,i.split(" ")):"remove"==t?i&&s.classList.remove.apply(s.classList,i.split(" ")):(o&&s.classList.remove.apply(s.classList,o.split(" ")),i&&s.classList.add.apply(s.classList,i.split(" ")))},liHeight:function(e){if(e||!1!==this.options.size&&!Object.keys(this.sizeInfo).length){var t=document.createElement("div"),i=document.createElement("div"),s=document.createElement("div"),n=document.createElement("ul"),o=document.createElement("li"),r=document.createElement("li"),l=document.createElement("li"),a=document.createElement("a"),c=document.createElement("span"),d=this.options.header&&0this.sizeInfo.menuExtras.vert&&l+this.sizeInfo.menuExtras.vert+50>this.sizeInfo.selectOffsetBot,!0===this.selectpicker.isSearching&&(a=this.selectpicker.dropup),this.$newElement.toggleClass(V.DROPUP,a),this.selectpicker.dropup=a),"auto"===this.options.size)n=3this.options.size){for(var b=0;bthis.sizeInfo.menuInnerHeight&&(this.sizeInfo.hasScrollBar=!0,this.sizeInfo.totalMenuWidth=this.sizeInfo.menuWidth+this.sizeInfo.scrollBarWidth),"auto"===this.options.dropdownAlignRight&&this.$menu.toggleClass(V.MENURIGHT,this.sizeInfo.selectOffsetLeft>this.sizeInfo.selectOffsetRight&&this.sizeInfo.selectOffsetRightthis.options.size&&i.off("resize"+j+"."+this.selectId+".setMenuSize 
scroll"+j+"."+this.selectId+".setMenuSize")}this.createView(!1,!0,e)},setWidth:function(){var i=this;"auto"===this.options.width?requestAnimationFrame(function(){i.$menu.css("min-width","0"),i.$element.on("loaded"+j,function(){i.liHeight(),i.setMenuSize();var e=i.$newElement.clone().appendTo("body"),t=e.css("width","auto").children("button").outerWidth();e.remove(),i.sizeInfo.selectWidth=Math.max(i.sizeInfo.totalMenuWidth,t),i.$newElement.css("width",i.sizeInfo.selectWidth+"px")})}):"fit"===this.options.width?(this.$menu.css("min-width",""),this.$newElement.css("width","").addClass("fit-width")):this.options.width?(this.$menu.css("min-width",""),this.$newElement.css("width",this.options.width)):(this.$menu.css("min-width",""),this.$newElement.css("width","")),this.$newElement.hasClass("fit-width")&&"fit"!==this.options.width&&this.$newElement[0].classList.remove("fit-width")},selectPosition:function(){this.$bsContainer=z('
');function e(e){var t={},i=r.options.display||!!z.fn.dropdown.Constructor.Default&&z.fn.dropdown.Constructor.Default.display;r.$bsContainer.addClass(e.attr("class").replace(/form-control|fit-width/gi,"")).toggleClass(V.DROPUP,e.hasClass(V.DROPUP)),s=e.offset(),l.is("body")?n={top:0,left:0}:((n=l.offset()).top+=parseInt(l.css("borderTopWidth"))-l.scrollTop(),n.left+=parseInt(l.css("borderLeftWidth"))-l.scrollLeft()),o=e.hasClass(V.DROPUP)?0:e[0].offsetHeight,(R.major<4||"static"===i)&&(t.top=s.top-n.top+o,t.left=s.left-n.left),t.width=e[0].offsetWidth,r.$bsContainer.css(t)}var s,n,o,r=this,l=z(this.options.container);this.$button.on("click.bs.dropdown.data-api",function(){r.isDisabled()||(e(r.$newElement),r.$bsContainer.appendTo(r.options.container).toggleClass(V.SHOW,!r.$button.hasClass(V.SHOW)).append(r.$menu))}),z(window).off("resize"+j+"."+this.selectId+" scroll"+j+"."+this.selectId).on("resize"+j+"."+this.selectId+" scroll"+j+"."+this.selectId,function(){r.$newElement.hasClass(V.SHOW)&&e(r.$newElement)}),this.$element.on("hide"+j,function(){r.$menu.data("height",r.$menu.height()),r.$bsContainer.detach()})},setOptionStatus:function(e){var t=this;if(t.noScroll=!1,t.selectpicker.view.visibleElements&&t.selectpicker.view.visibleElements.length)for(var i=0;i
');y[2]&&($=$.replace("{var}",y[2][1"+$+"
")),d=!1,C.$element.trigger("maxReached"+j)),g&&w&&(E.append(z("
"+S+"
")),d=!1,C.$element.trigger("maxReachedGrp"+j)),setTimeout(function(){C.setSelected(r,!1)},10),E[0].classList.add("fadeOut"),setTimeout(function(){E.remove()},1050)}}}else c&&(c.selected=!1),h.selected=!0,C.setSelected(r,!0);!C.multiple||C.multiple&&1===C.options.maxOptions?C.$button.trigger("focus"):C.options.liveSearch&&C.$searchbox.trigger("focus"),d&&(!C.multiple&&a===s.selectedIndex||(A=[h.index,p.prop("selected"),l],C.$element.triggerNative("change")))}}),this.$menu.on("click","li."+V.DISABLED+" a, ."+V.POPOVERHEADER+", ."+V.POPOVERHEADER+" :not(.close)",function(e){e.currentTarget==this&&(e.preventDefault(),e.stopPropagation(),C.options.liveSearch&&!z(e.target).hasClass("close")?C.$searchbox.trigger("focus"):C.$button.trigger("focus"))}),this.$menuInner.on("click",".divider, .dropdown-header",function(e){e.preventDefault(),e.stopPropagation(),C.options.liveSearch?C.$searchbox.trigger("focus"):C.$button.trigger("focus")}),this.$menu.on("click","."+V.POPOVERHEADER+" .close",function(){C.$button.trigger("click")}),this.$searchbox.on("click",function(e){e.stopPropagation()}),this.$menu.on("click",".actions-btn",function(e){C.options.liveSearch?C.$searchbox.trigger("focus"):C.$button.trigger("focus"),e.preventDefault(),e.stopPropagation(),z(this).hasClass("bs-select-all")?C.selectAll():C.deselectAll()}),this.$element.on("change"+j,function(){C.render(),C.$element.trigger("changed"+j,A),A=null}).on("focus"+j,function(){C.options.mobile||C.$button.trigger("focus")})},liveSearchListener:function(){var u=this,f=document.createElement("li");this.$button.on("click.bs.dropdown.data-api",function(){u.$searchbox.val()&&u.$searchbox.val("")}),this.$searchbox.on("click.bs.dropdown.data-api focus.bs.dropdown.data-api touchend.bs.dropdown.data-api",function(e){e.stopPropagation()}),this.$searchbox.on("input propertychange",function(){var e=u.$searchbox.val();if(u.selectpicker.search.elements=[],u.selectpicker.search.data=[],e){var 
t=[],i=e.toUpperCase(),s={},n=[],o=u._searchStyle(),r=u.options.liveSearchNormalize;r&&(i=w(i));for(var l=0;l=a.selectpicker.view.canHighlight.length&&(t=0),a.selectpicker.view.canHighlight[t+f]||(t=t+1+a.selectpicker.view.canHighlight.slice(t+f+1).indexOf(!0))),e.preventDefault();var m=f+t;e.which===B?0===f&&t===c.length-1?(a.$menuInner[0].scrollTop=a.$menuInner[0].scrollHeight,m=a.selectpicker.current.elements.length-1):d=(o=(n=a.selectpicker.current.data[m]).position-n.height)u+a.sizeInfo.menuInnerHeight),s=a.selectpicker.main.elements[v],a.activeIndex=b[x],a.focusItem(s),s&&s.firstChild.focus(),d&&(a.$menuInner[0].scrollTop=o),r.trigger("focus")}}i&&(e.which===H&&!a.selectpicker.keydown.keyHistory||e.which===D||e.which===W&&a.options.selectOnTab)&&(e.which!==H&&e.preventDefault(),a.options.liveSearch&&e.which===H||(a.$menuInner.find(".active a").trigger("click",!0),r.trigger("focus"),a.options.liveSearch||(e.preventDefault(),z(document).data("spaceSelect",!0))))}},mobile:function(){this.$element[0].classList.add("mobile-device")},refresh:function(){var e=z.extend({},this.options,this.$element.data());this.options=e,this.checkDisabled(),this.setStyle(),this.render(),this.buildData(),this.buildList(),this.setWidth(),this.setSize(!0),this.$element.trigger("refreshed"+j)},hide:function(){this.$newElement.hide()},show:function(){this.$newElement.show()},remove:function(){this.$newElement.remove(),this.$element.remove()},destroy:function(){this.$newElement.before(this.$element).remove(),this.$bsContainer?this.$bsContainer.remove():this.$menu.remove(),this.$element.off(j).removeData("selectpicker").removeClass("bs-select-hidden selectpicker"),z(window).off(j+"."+this.selectId)}};var J=z.fn.selectpicker;z.fn.selectpicker=Z,z.fn.selectpicker.Constructor=Y,z.fn.selectpicker.noConflict=function(){return z.fn.selectpicker=J,this};var 
Q=z.fn.dropdown.Constructor._dataApiKeydownHandler||z.fn.dropdown.Constructor.prototype.keydown;z(document).off("keydown.bs.dropdown.data-api").on("keydown.bs.dropdown.data-api",':not(.bootstrap-select) > [data-toggle="dropdown"]',Q).on("keydown.bs.dropdown.data-api",":not(.bootstrap-select) > .dropdown-menu",Q).on("keydown"+j,'.bootstrap-select [data-toggle="dropdown"], .bootstrap-select [role="listbox"], .bootstrap-select .bs-searchbox input',Y.prototype.keydown).on("focusin.modal",'.bootstrap-select [data-toggle="dropdown"], .bootstrap-select [role="listbox"], .bootstrap-select .bs-searchbox input',function(e){e.stopPropagation()}),z(window).on("load"+j+".data-api",function(){z(".selectpicker").each(function(){var e=z(this);Z.call(e,e.data())})})}(e)}); +//# sourceMappingURL=bootstrap-select.min.js.map \ No newline at end of file diff --git a/plexpy/graphs.py b/plexpy/graphs.py index 49dfee57..58a199c0 100644 --- a/plexpy/graphs.py +++ b/plexpy/graphs.py @@ -51,11 +51,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -171,11 +167,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = "AND session_history.user_id = %s " % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = "AND session_history.user_id = %s " % user_id + user_cond = 
self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -308,11 +300,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -427,11 +415,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 12 timestamp = arrow.get(helpers.timestamp()).shift(months=-time_range).floor('month').timestamp() - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -554,11 +538,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -653,11 +633,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if 
session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -763,11 +739,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -860,11 +832,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -941,11 +909,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if 
grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -1048,11 +1012,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -1128,11 +1088,7 @@ class Graphs(object): time_range = helpers.cast_to_int(time_range) or 30 timestamp = helpers.timestamp() - time_range * 24 * 60 * 60 - user_cond = '' - if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() - elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = self._make_user_cond(user_id) if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -1212,3 +1168,16 @@ class Graphs(object): 'series': [series_1_output, series_2_output, series_3_output]} return output + + def _make_user_cond(self, user_id): + """ + Expects user_id to be a comma-separated list of ints. 
+ """ + user_cond = '' + if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): + user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() + elif user_id: + user_ids = helpers.split_strip(user_id) + if all(id.isdigit() for id in user_ids): + user_cond = 'AND session_history.user_id IN (%s) ' % ','.join(user_ids) + return user_cond diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 06a9a802..7549aefa 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -2258,7 +2258,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2302,7 +2302,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2346,7 +2346,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2390,7 +2390,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of months of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2434,7 +2434,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id 
(str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2478,7 +2478,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2522,7 +2522,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2565,7 +2565,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2608,7 +2608,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2651,7 +2651,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: @@ -2694,7 +2694,7 @@ class WebInterface(object): Optional parameters: time_range (str): The number of days of data to return y_axis (str): "plays" or "duration" - user_id (str): The user id to filter the data + user_id (str): Comma separated list of user id to filter the data grouping (int): 0 or 1 Returns: From d91e561a56a14728970b3f581a3329a40cff0abf Mon 
Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Fri, 7 Jul 2023 17:47:38 -0700 Subject: [PATCH 111/113] Regroup history in separate thread and improve logging --- data/interfaces/default/settings.html | 4 ++-- plexpy/activity_processor.py | 12 +++++++++++- plexpy/webserve.py | 8 +++----- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/data/interfaces/default/settings.html b/data/interfaces/default/settings.html index 7cd614e0..c5d8fe37 100644 --- a/data/interfaces/default/settings.html +++ b/data/interfaces/default/settings.html @@ -2498,9 +2498,9 @@ $(document).ready(function() { }); $("#regroup_history").click(function () { - var msg = 'Are you sure you want to regroup play history in the database?'; + var msg = 'Are you sure you want to regroup play history in the database?

This may take a long time for large databases.
Regrouping will continue in the background.
'; var url = 'regroup_history'; - confirmAjaxCall(url, msg, null, 'Regrouping play history...'); + confirmAjaxCall(url, msg); }); $("#delete_temp_sessions").click(function () { diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index b1558a56..0437d2d5 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -719,8 +719,14 @@ class ActivityProcessor(object): "JOIN session_history_metadata ON session_history.id = session_history_metadata.id" ) results = self.db.select(query) + count = len(results) + progress = 0 + + for i, session in enumerate(results, start=1): + if int(i / count * 10) > progress: + progress = int(i / count * 10) + logger.info("Tautulli ActivityProcessor :: Regrouping session history: %d%%", progress * 10) - for session in results: try: self.group_history(session['id'], session) except Exception as e: @@ -729,3 +735,7 @@ class ActivityProcessor(object): logger.info("Tautulli ActivityProcessor :: Regrouping session history complete.") return True + + +def regroup_history(): + ActivityProcessor().regroup_history() diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 7549aefa..b98c9e7c 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -443,12 +443,10 @@ class WebInterface(object): def regroup_history(self, **kwargs): """ Regroup play history in the database.""" - result = activity_processor.ActivityProcessor().regroup_history() + threading.Thread(target=activity_processor.regroup_history).start() - if result: - return {'result': 'success', 'message': 'Regrouped play history.'} - else: - return {'result': 'error', 'message': 'Regrouping play history failed.'} + return {'result': 'success', + 'message': 'Regrouping play history started. 
Check the logs to monitor any problems.'} @cherrypy.expose @cherrypy.tools.json_out() From 6010e406c817ecfb11d873824e0adcc22f5bcc80 Mon Sep 17 00:00:00 2001 From: David Pooley Date: Sun, 9 Jul 2023 00:32:42 +0100 Subject: [PATCH 112/113] Fix simultaneous streams per IP not behaving as expected with IPv6 (#2096) * Fix IPv6 comparisson for concurrent streams * Update regex to allow numbers in config variables * Remove additional logging for local testing * Update plexpy/notification_handler.py Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> --------- Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> --- plexpy/config.py | 3 ++- plexpy/helpers.py | 16 ++++++++++++++++ plexpy/notification_handler.py | 9 ++++++++- 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/plexpy/config.py b/plexpy/config.py index 7b583d8d..6f2926d9 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -177,6 +177,7 @@ _CONFIG_DEFINITIONS = { 'NOTIFY_RECENTLY_ADDED_UPGRADE': (int, 'Monitoring', 0), 'NOTIFY_REMOTE_ACCESS_THRESHOLD': (int, 'Monitoring', 60), 'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0), + 'NOTIFY_CONCURRENT_IPV6_CIDR': (str, 'Monitoring', '/64'), 'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2), 'NOTIFY_NEW_DEVICE_INITIAL_ONLY': (int, 'Monitoring', 1), 'NOTIFY_SERVER_CONNECTION_THRESHOLD': (int, 'Monitoring', 60), @@ -536,7 +537,7 @@ class Config(object): Returns something from the ini unless it is a real property of the configuration object or is not all caps. 
""" - if not re.match(r'[A-Z_]+$', name): + if not re.match(r'[A-Z0-9_]+$', name): return super(Config, self).__getattr__(name) else: return self.check_setting(name) diff --git a/plexpy/helpers.py b/plexpy/helpers.py index 9cfb9c45..085dfc12 100644 --- a/plexpy/helpers.py +++ b/plexpy/helpers.py @@ -33,6 +33,7 @@ from functools import reduce, wraps import hashlib import imghdr from future.moves.itertools import islice, zip_longest +from ipaddress import ip_address, ip_network, IPv4Address import ipwhois import ipwhois.exceptions import ipwhois.utils @@ -1777,3 +1778,18 @@ def check_watched(media_type, view_offset, duration, marker_credits_first=None, def pms_name(): return plexpy.CONFIG.PMS_NAME_OVERRIDE or plexpy.CONFIG.PMS_NAME + + +def ip_type(ip: str) -> str: + try: + return "IPv4" if type(ip_address(ip)) is IPv4Address else "IPv6" + except ValueError: + return "Invalid" + + +def get_ipv6_network_address(ip: str) -> str: + cidr = "/64" + cidr_pattern = re.compile(r'^/(1([0-1]\d|2[0-8]))$|^/(\d\d)$|^/[1-9]$') + if cidr_pattern.match(plexpy.CONFIG.NOTIFY_CONCURRENT_IPV6_CIDR): + cidr = plexpy.CONFIG.NOTIFY_CONCURRENT_IPV6_CIDR + return str(ip_network(ip+cidr, strict=False).network_address) diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 7dd81627..8b4b8583 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -160,6 +160,7 @@ def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, ti def notify_conditions(notify_action=None, stream_data=None, timeline_data=None, **kwargs): logger.debug("Tautulli NotificationHandler :: Checking global notification conditions.") + evaluated = False # Activity notifications if stream_data: @@ -187,7 +188,13 @@ def notify_conditions(notify_action=None, stream_data=None, timeline_data=None, user_sessions = [s for s in result['sessions'] if s['user_id'] == stream_data['user_id']] if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP: - evaluated = 
len(Counter(s['ip_address'] for s in user_sessions)) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD + ip_addresses = set() + for s in user_sessions: + if helpers.ip_type(s['ip_address']) == 'IPv6': + ip_addresses.add(helpers.get_ipv6_network_address(s['ip_address'])) + elif helpers.ip_type(s['ip_address']) == 'IPv4': + ip_addresses.add(s['ip_address']) + evaluated = len(ip_addresses) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD else: evaluated = len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD From 571a6b6d2df91d209907800af3f1b5c7356e2577 Mon Sep 17 00:00:00 2001 From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com> Date: Mon, 10 Jul 2023 08:56:27 -0700 Subject: [PATCH 113/113] Cast view_offset to int for regrouping history --- plexpy/activity_processor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 0437d2d5..9115f332 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -519,12 +519,12 @@ class ActivityProcessor(object): if len(result) > 1: new_session = {'id': result[0]['id'], 'rating_key': result[0]['rating_key'], - 'view_offset': result[0]['view_offset'], + 'view_offset': helpers.cast_to_int(result[0]['view_offset']), 'reference_id': result[0]['reference_id']} prev_session = {'id': result[1]['id'], 'rating_key': result[1]['rating_key'], - 'view_offset': result[1]['view_offset'], + 'view_offset': helpers.cast_to_int(result[1]['view_offset']), 'reference_id': result[1]['reference_id']} if metadata: