From 2f6869ed2ad55f50b364e2fb9d63ca5b791ed6cd Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 23 Dec 2022 11:10:39 -0800
Subject: [PATCH 001/113] Update CI badges in README
* Ref: badges/shields#8671
[skip ci]
---
README.md | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/README.md b/README.md
index fa35aab8..d38f69e1 100644
--- a/README.md
+++ b/README.md
@@ -57,24 +57,24 @@ Read the [Installation Guides][Installation] for instructions on how to install
[badge-release-nightly-last-commit]: https://img.shields.io/github/last-commit/Tautulli/Tautulli/nightly?style=flat-square&color=blue
[badge-release-nightly-commits]: https://img.shields.io/github/commits-since/Tautulli/Tautulli/latest/nightly?style=flat-square&color=blue
[badge-docker-master]: https://img.shields.io/badge/docker-latest-blue?style=flat-square
-[badge-docker-master-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/master?style=flat-square
+[badge-docker-master-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-docker.yml?style=flat-square&branch=master
[badge-docker-beta]: https://img.shields.io/badge/docker-beta-blue?style=flat-square
-[badge-docker-beta-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/beta?style=flat-square
+[badge-docker-beta-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-docker.yml?style=flat-square&branch=beta
[badge-docker-nightly]: https://img.shields.io/badge/docker-nightly-blue?style=flat-square
-[badge-docker-nightly-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Docker/nightly?style=flat-square
+[badge-docker-nightly-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-docker.yml?style=flat-square&branch=nightly
[badge-snap-master]: https://img.shields.io/badge/snap-stable-blue?style=flat-square
-[badge-snap-master-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Snap/master?style=flat-square
+[badge-snap-master-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-snap.yml?style=flat-square&branch=master
[badge-snap-beta]: https://img.shields.io/badge/snap-beta-blue?style=flat-square
-[badge-snap-beta-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Snap/beta?style=flat-square
+[badge-snap-beta-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-snap.yml?style=flat-square&branch=beta
[badge-snap-nightly]: https://img.shields.io/badge/snap-edge-blue?style=flat-square
-[badge-snap-nightly-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Snap/nightly?style=flat-square
+[badge-snap-nightly-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-snap.yml?style=flat-square&branch=nightly
[badge-installer-master-win]: https://img.shields.io/github/v/release/Tautulli/Tautulli?label=windows&style=flat-square
[badge-installer-master-macos]: https://img.shields.io/github/v/release/Tautulli/Tautulli?label=macos&style=flat-square
-[badge-installer-master-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Installers/master?style=flat-square
+[badge-installer-master-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-installers.yml?style=flat-square&branch=master
[badge-installer-beta-win]: https://img.shields.io/github/v/release/Tautulli/Tautulli?label=windows&include_prereleases&style=flat-square
[badge-installer-beta-macos]: https://img.shields.io/github/v/release/Tautulli/Tautulli?label=macos&include_prereleases&style=flat-square
-[badge-installer-beta-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Installers/beta?style=flat-square
-[badge-installer-nightly-ci]: https://img.shields.io/github/workflow/status/Tautulli/Tautulli/Publish%20Installers/nightly?style=flat-square
+[badge-installer-beta-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-installers.yml?style=flat-square&branch=beta
+[badge-installer-nightly-ci]: https://img.shields.io/github/actions/workflow/status/Tautulli/Tautulli/.github/workflows/publish-installers.yml?style=flat-square&branch=nightly
## Support
From 460a463be11386e2777f488f60e6d437672f283c Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Thu, 19 Jan 2023 13:54:55 -0800
Subject: [PATCH 002/113] Update notification parameter description for Plex
API image paths
---
plexpy/common.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/plexpy/common.py b/plexpy/common.py
index 33b4cc00..3e56a4c9 100644
--- a/plexpy/common.py
+++ b/plexpy/common.py
@@ -600,11 +600,11 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Rating Key', 'type': 'int', 'value': 'rating_key', 'description': 'The unique identifier for the movie, episode, or track.'},
{'name': 'Parent Rating Key', 'type': 'int', 'value': 'parent_rating_key', 'description': 'The unique identifier for the season or album.'},
{'name': 'Grandparent Rating Key', 'type': 'int', 'value': 'grandparent_rating_key', 'description': 'The unique identifier for the TV show or artist.'},
- {'name': 'Art', 'type': 'str', 'value': 'art', 'description': 'The Plex background art for the media.'},
- {'name': 'Thumb', 'type': 'str', 'value': 'thumb', 'description': 'The Plex thumbnail for the movie or episode.'},
- {'name': 'Parent Thumb', 'type': 'str', 'value': 'parent_thumb', 'description': 'The Plex thumbnail for the season or album.'},
- {'name': 'Grandparent Thumb', 'type': 'str', 'value': 'grandparent_thumb', 'description': 'The Plex thumbnail for the TV show or artist.'},
- {'name': 'Poster Thumb', 'type': 'str', 'value': 'poster_thumb', 'description': 'The Plex thumbnail for the poster image.'},
+ {'name': 'Art', 'type': 'str', 'value': 'art', 'description': 'The Plex API path to the background art for the media.'},
+ {'name': 'Thumb', 'type': 'str', 'value': 'thumb', 'description': 'The Plex API path to the thumbnail for the movie or episode.'},
+ {'name': 'Parent Thumb', 'type': 'str', 'value': 'parent_thumb', 'description': 'The Plex API path to the thumbnail for the season or album.'},
+ {'name': 'Grandparent Thumb', 'type': 'str', 'value': 'grandparent_thumb', 'description': 'The Plex API path to the thumbnail for the TV show or artist.'},
+ {'name': 'Poster Thumb', 'type': 'str', 'value': 'poster_thumb', 'description': 'The Plex API path to the thumbnail for the poster image.'},
{'name': 'Poster Title', 'type': 'str', 'value': 'poster_title', 'description': 'The title for the poster image.'},
{'name': 'Indexes', 'type': 'int', 'value': 'indexes', 'description': 'If the media has video preview thumbnails.', 'example': '0 or 1'},
]
From 548264d51a905bd80d418e0bdb34266b6da32a8c Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 20 Jan 2023 17:05:30 -0800
Subject: [PATCH 003/113] Add provenance: false to docker/build-push-action
Ref: docker/buildx#1533
---
.github/workflows/publish-docker.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml
index 125cae51..37885c2f 100644
--- a/.github/workflows/publish-docker.yml
+++ b/.github/workflows/publish-docker.yml
@@ -87,6 +87,7 @@ jobs:
ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
+ provenance: false
discord:
name: Discord Notification
From 1e02c26a9ad1fa3c36040e9a6c3cd58179739d0c Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 20 Jan 2023 17:09:03 -0800
Subject: [PATCH 004/113] Relax workflow action versions
---
.github/workflows/publish-docker.yml | 4 ++--
.github/workflows/publish-installers.yml | 4 ++--
.github/workflows/publish-snap.yml | 2 +-
.github/workflows/pull-requests.yml | 2 +-
4 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml
index 37885c2f..85ce6266 100644
--- a/.github/workflows/publish-docker.yml
+++ b/.github/workflows/publish-docker.yml
@@ -13,7 +13,7 @@ jobs:
if: ${{ !contains(github.event.head_commit.message, '[skip ci]') }}
steps:
- name: Checkout Code
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3
- name: Prepare
id: prepare
@@ -47,7 +47,7 @@ jobs:
version: latest
- name: Cache Docker Layers
- uses: actions/cache@v3.2.0
+ uses: actions/cache@v3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
diff --git a/.github/workflows/publish-installers.yml b/.github/workflows/publish-installers.yml
index 3b271a1b..31ad0f81 100644
--- a/.github/workflows/publish-installers.yml
+++ b/.github/workflows/publish-installers.yml
@@ -24,7 +24,7 @@ jobs:
steps:
- name: Checkout Code
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3
- name: Set Release Version
id: get_version
@@ -52,7 +52,7 @@ jobs:
echo $GITHUB_SHA > version.txt
- name: Set Up Python
- uses: actions/setup-python@v4.4.0
+ uses: actions/setup-python@v4
with:
python-version: '3.9'
cache: pip
diff --git a/.github/workflows/publish-snap.yml b/.github/workflows/publish-snap.yml
index 7ad8fe95..9df4d2fd 100644
--- a/.github/workflows/publish-snap.yml
+++ b/.github/workflows/publish-snap.yml
@@ -20,7 +20,7 @@ jobs:
- armhf
steps:
- name: Checkout Code
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3
- name: Prepare
id: prepare
diff --git a/.github/workflows/pull-requests.yml b/.github/workflows/pull-requests.yml
index d7c8e45d..58cb4ee4 100644
--- a/.github/workflows/pull-requests.yml
+++ b/.github/workflows/pull-requests.yml
@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3
- name: Comment on Pull Request
uses: mshick/add-pr-comment@v2
From 0959f28e957ef119e5abbd083796c650862501c4 Mon Sep 17 00:00:00 2001
From: herby2212 <12448284+herby2212@users.noreply.github.com>
Date: Sat, 28 Jan 2023 23:09:11 +0100
Subject: [PATCH 005/113] Add edition detail field for movie info (#1957)
* edition addition for movie info
* swap position to match plex order
---
data/interfaces/default/info.html | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/data/interfaces/default/info.html b/data/interfaces/default/info.html
index 6d8b3aaf..a7acb11b 100644
--- a/data/interfaces/default/info.html
+++ b/data/interfaces/default/info.html
@@ -14,6 +14,7 @@ rating_key Returns the unique identifier for the media item.
media_type Returns the type of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'.
art Returns the location of the item's artwork
title Returns the name of the movie, show, episode, artist, album, or track.
+edition_title Returns the edition title of a movie.
duration Returns the standard runtime of the media.
content_rating Returns the age rating for the media.
summary Returns a brief description of the media plot.
@@ -390,6 +391,11 @@ DOCUMENTATION :: END
Runtime ${data['duration']}
% endif
+ % if data['edition_title']:
+
% if data['content_rating']:
Rated
${data['content_rating']}
From c51ee673e83e4ed2fe143f3ef8466575070e31ff Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Sat, 28 Jan 2023 13:29:12 -0800
Subject: [PATCH 006/113] Add support for Telegram group topics
* Closes #1980
---
plexpy/notifiers.py | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py
index fbd48c4b..75f810d8 100644
--- a/plexpy/notifiers.py
+++ b/plexpy/notifiers.py
@@ -3962,7 +3962,10 @@ class TELEGRAM(Notifier):
}
def agent_notify(self, subject='', body='', action='', **kwargs):
- data = {'chat_id': self.config['chat_id']}
+ chat_id, *message_thread_id = self.config['chat_id'].split('/')
+ data = {'chat_id': chat_id}
+ if message_thread_id:
+ data['message_thread_id'] = message_thread_id[0]
if self.config['incl_subject']:
text = subject + '\r\n' + body
@@ -4032,7 +4035,8 @@ class TELEGRAM(Notifier):
'description': 'Your Telegram Chat ID, Group ID, Channel ID or @channelusername. '
'Contact
@myidbot'
- ' on Telegram to get an ID.',
+ ' on Telegram to get an ID. '
+ 'For a group topic, append
/topicID to the group ID.',
'input_type': 'text'
},
{'label': 'Include Subject Line',
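For reference, a minimal standalone sketch of the chat ID parsing introduced above; the helper name and example IDs are illustrative, not part of the patch:

```python
# Sketch of the new Telegram chat_id handling: an optional group topic
# (message thread) ID can be appended to the group ID after a slash.
def build_telegram_payload(chat_id_config: str) -> dict:
    chat_id, *message_thread_id = chat_id_config.split('/')
    data = {'chat_id': chat_id}
    if message_thread_id:
        # Telegram Bot API accepts message_thread_id for forum topics
        data['message_thread_id'] = message_thread_id[0]
    return data

print(build_telegram_payload('-1001234567890'))     # {'chat_id': '-1001234567890'}
print(build_telegram_payload('-1001234567890/42'))  # adds 'message_thread_id': '42'
```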
From b0a55df8620e37795e6a5e202fdd8b10e4d5338a Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Sat, 28 Jan 2023 13:43:42 -0800
Subject: [PATCH 007/113] Add anidb_id and anidb_url notification parameters
* Closes #1973
---
plexpy/common.py | 2 ++
plexpy/notification_handler.py | 6 ++++++
2 files changed, 8 insertions(+)
diff --git a/plexpy/common.py b/plexpy/common.py
index 3e56a4c9..ad91ee87 100644
--- a/plexpy/common.py
+++ b/plexpy/common.py
@@ -553,6 +553,8 @@ NOTIFICATION_PARAMETERS = [
{'name': 'TVmaze URL', 'type': 'str', 'value': 'tvmaze_url', 'description': 'The TVmaze URL for the TV show.'},
{'name': 'MusicBrainz ID', 'type': 'str', 'value': 'musicbrainz_id', 'description': 'The MusicBrainz ID for the artist, album, or track.', 'example': 'e.g. b670dfcf-9824-4309-a57e-03595aaba286'},
{'name': 'MusicBrainz URL', 'type': 'str', 'value': 'musicbrainz_url', 'description': 'The MusicBrainz URL for the artist, album, or track.'},
+ {'name': 'AniDB ID', 'type': 'str', 'value': 'anidb_id', 'description': 'The AniDB ID for the Anime', 'example': 'e.g. 69', 'help_text': 'TV show library agent must be HAMA'},
+ {'name': 'AniDB URL', 'type': 'str', 'value': 'anidb_url', 'description': 'The AniDB URL for the Anime', 'help_text': 'TV show library agent must be HAMA'},
{'name': 'Last.fm URL', 'type': 'str', 'value': 'lastfm_url', 'description': 'The Last.fm URL for the album.', 'help_text': 'Music library agent must be Last.fm'},
{'name': 'Trakt.tv URL', 'type': 'str', 'value': 'trakt_url', 'description': 'The trakt.tv URL for the movie or TV show.'},
{'name': 'Container', 'type': 'str', 'value': 'container', 'description': 'The media container of the original media.'},
diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py
index 57f5ff83..6b55c5b7 100644
--- a/plexpy/notification_handler.py
+++ b/plexpy/notification_handler.py
@@ -715,6 +715,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
else:
notify_params['musicbrainz_url'] = 'https://musicbrainz.org/track/' + notify_params['musicbrainz_id']
+ if 'hama://' in notify_params['guid']:
+ notify_params['anidb_id'] = notify_params['guid'].split('hama://')[1].split('/')[0].split('?')[0].split('-')[1]
+ notify_params['anidb_url'] = 'https://anidb.net/anime/' + notify_params['anidb_id']
+
# Get TheMovieDB info (for movies and tv only)
if plexpy.CONFIG.THEMOVIEDB_LOOKUP and notify_params['media_type'] in ('movie', 'show', 'season', 'episode'):
if notify_params.get('themoviedb_id'):
@@ -1142,6 +1146,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'tvmaze_url': notify_params['tvmaze_url'],
'musicbrainz_id': notify_params['musicbrainz_id'],
'musicbrainz_url': notify_params['musicbrainz_url'],
+ 'anidb_id': notify_params['anidb_id'],
+ 'anidb_url': notify_params['anidb_url'],
'lastfm_url': notify_params['lastfm_url'],
'trakt_url': notify_params['trakt_url'],
'container': notify_params['container'],
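For reference, a minimal sketch of the AniDB ID extraction introduced above, run against an example HAMA-style guid; the guid value is illustrative:

```python
# Illustrative HAMA agent guid; real guids may differ in detail.
guid = 'com.plexapp.agents.hama://anidb-69/1/1?lang=en'

if 'hama://' in guid:
    # Same chained splits as the patch: take the text after 'hama://',
    # drop any path/query, then keep the part after 'anidb-'.
    anidb_id = guid.split('hama://')[1].split('/')[0].split('?')[0].split('-')[1]
    anidb_url = 'https://anidb.net/anime/' + anidb_id
    print(anidb_id, anidb_url)  # 69 https://anidb.net/anime/69
```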
From b6ff45138f29763b92fa59646c7499cfd3d28c41 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Sat, 28 Jan 2023 14:04:45 -0800
Subject: [PATCH 008/113] Add section_id and user_id parameters to
get_home_stats API command
* Closes #1944
---
plexpy/datafactory.py | 49 ++++++++++++++++++++++++-------------------
plexpy/webserve.py | 9 ++++++--
2 files changed, 35 insertions(+), 23 deletions(-)
diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py
index 700c14c6..cf55a2c0 100644
--- a/plexpy/datafactory.py
+++ b/plexpy/datafactory.py
@@ -349,7 +349,8 @@ class DataFactory(object):
return dict
def get_home_stats(self, grouping=None, time_range=30, stats_type='plays',
- stats_start=0, stats_count=10, stat_id='', stats_cards=None):
+ stats_start=0, stats_count=10, stat_id='', stats_cards=None,
+ section_id=None, user_id=None):
monitor_db = database.MonitorDatabase()
time_range = helpers.cast_to_int(time_range)
@@ -364,6 +365,12 @@ class DataFactory(object):
if stats_cards is None:
stats_cards = plexpy.CONFIG.HOME_STATS_CARDS
+ where_id = ''
+ if section_id:
+ where_id += 'AND session_history.section_id = %s ' % section_id
+ if user_id:
+ where_id += 'AND session_history.user_id = %s ' % user_id
+
movie_watched_percent = plexpy.CONFIG.MOVIE_WATCHED_PERCENT
tv_watched_percent = plexpy.CONFIG.TV_WATCHED_PERCENT
music_watched_percent = plexpy.CONFIG.MUSIC_WATCHED_PERCENT
@@ -385,12 +392,12 @@ class DataFactory(object):
' AS d ' \
' FROM session_history ' \
' WHERE session_history.stopped >= %s ' \
- ' AND session_history.media_type = "movie" ' \
+ ' AND session_history.media_type = "movie" %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'GROUP BY shm.full_title, shm.year ' \
'ORDER BY %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e)
@@ -438,12 +445,12 @@ class DataFactory(object):
' AS d ' \
' FROM session_history ' \
' WHERE session_history.stopped >= %s ' \
- ' AND session_history.media_type = "movie" ' \
+ ' AND session_history.media_type = "movie" %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'GROUP BY shm.full_title, shm.year ' \
'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_movies: %s." % e)
@@ -490,12 +497,12 @@ class DataFactory(object):
' AS d ' \
' FROM session_history ' \
' WHERE session_history.stopped >= %s ' \
- ' AND session_history.media_type = "episode" ' \
+ ' AND session_history.media_type = "episode" %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'GROUP BY shm.grandparent_title ' \
'ORDER BY %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." % e)
@@ -545,12 +552,12 @@ class DataFactory(object):
' AS d ' \
' FROM session_history ' \
' WHERE session_history.stopped >= %s ' \
- ' AND session_history.media_type = "episode" ' \
+ ' AND session_history.media_type = "episode" %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'GROUP BY shm.grandparent_title ' \
'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." % e)
@@ -596,12 +603,12 @@ class DataFactory(object):
' AS d ' \
' FROM session_history ' \
' WHERE session_history.stopped >= %s ' \
- ' AND session_history.media_type = "track" ' \
+ ' AND session_history.media_type = "track" %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'GROUP BY shm.original_title, shm.grandparent_title ' \
'ORDER BY %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." % e)
@@ -650,12 +657,12 @@ class DataFactory(object):
' AS d ' \
' FROM session_history ' \
' WHERE session_history.stopped >= %s ' \
- ' AND session_history.media_type = "track" ' \
+ ' AND session_history.media_type = "track" %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'GROUP BY shm.original_title, shm.grandparent_title ' \
'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." % e)
@@ -706,14 +713,14 @@ class DataFactory(object):
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
- ' WHERE session_history.stopped >= %s ' \
+ ' WHERE session_history.stopped >= %s %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'LEFT OUTER JOIN (SELECT * FROM library_sections WHERE deleted_section = 0) ' \
' AS ls ON sh.section_id = ls.section_id ' \
'GROUP BY sh.section_id ' \
'ORDER BY %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_libraries: %s." % e)
@@ -793,13 +800,13 @@ class DataFactory(object):
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
- ' WHERE session_history.stopped >= %s ' \
+ ' WHERE session_history.stopped >= %s %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \
'GROUP BY sh.user_id ' \
'ORDER BY %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." % e)
@@ -862,11 +869,11 @@ class DataFactory(object):
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
- ' WHERE session_history.stopped >= %s ' \
+ ' WHERE session_history.stopped >= %s %s ' \
' GROUP BY %s) AS sh ' \
'GROUP BY sh.platform ' \
'ORDER BY %s DESC, sh.started DESC ' \
- 'LIMIT %s OFFSET %s ' % (timestamp, group_by, sort_type, stats_count, stats_start)
+ 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e)
@@ -918,7 +925,7 @@ class DataFactory(object):
'FROM (SELECT *, MAX(id) FROM session_history ' \
' WHERE session_history.stopped >= %s ' \
' AND (session_history.media_type = "movie" ' \
- ' OR session_history.media_type = "episode") ' \
+ ' OR session_history.media_type = "episode") %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \
@@ -926,7 +933,7 @@ class DataFactory(object):
' OR sh.media_type == "episode" AND percent_complete >= %s ' \
'GROUP BY sh.id ' \
'ORDER BY last_watch DESC ' \
- 'LIMIT %s OFFSET %s' % (timestamp, group_by, movie_watched_percent, tv_watched_percent,
+ 'LIMIT %s OFFSET %s' % (timestamp, where_id, group_by, movie_watched_percent, tv_watched_percent,
stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
diff --git a/plexpy/webserve.py b/plexpy/webserve.py
index 76c44243..8ad3e664 100644
--- a/plexpy/webserve.py
+++ b/plexpy/webserve.py
@@ -6185,7 +6185,8 @@ class WebInterface(object):
@requireAuth(member_of("admin"))
@addtoapi()
def get_home_stats(self, grouping=None, time_range=30, stats_type='plays',
- stats_start=0, stats_count=10, stat_id='', **kwargs):
+ stats_start=0, stats_count=10, stat_id='',
+ section_id=None, user_id=None, **kwargs):
""" Get the homepage watch statistics.
```
@@ -6201,6 +6202,8 @@ class WebInterface(object):
stat_id (str): A single stat to return, 'top_movies', 'popular_movies',
'top_tv', 'popular_tv', 'top_music', 'popular_music', 'top_libraries',
'top_users', 'top_platforms', 'last_watched', 'most_concurrent'
+ section_id (int): The id of the Plex library section
+ user_id (int): The id of the Plex user
Returns:
json:
@@ -6282,7 +6285,9 @@ class WebInterface(object):
stats_type=stats_type,
stats_start=stats_start,
stats_count=stats_count,
- stat_id=stat_id)
+ stat_id=stat_id,
+ section_id=section_id,
+ user_id=user_id)
if result:
return result
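For reference, a hedged usage sketch of the extended get_home_stats API command; the base URL, API key, and IDs below are placeholders:

```python
# Calling get_home_stats with the new section_id and user_id filters
# through the Tautulli HTTP API.
import requests

params = {
    'apikey': 'YOUR_API_KEY',   # placeholder
    'cmd': 'get_home_stats',
    'time_range': 30,
    'stat_id': 'top_movies',
    'section_id': 1,            # new: limit stats to one library section
    'user_id': 123456,          # new: limit stats to one user
}
response = requests.get('http://localhost:8181/api/v2', params=params)
print(response.json())
```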
From 5ab9315f1601850c04bff5858429799da0148ee8 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Tue, 14 Feb 2023 18:19:35 -0800
Subject: [PATCH 009/113] Upload notification images directly to Discord
---
plexpy/notifiers.py | 31 +++++++++++++++++++++++--------
1 file changed, 23 insertions(+), 8 deletions(-)
diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py
index 75f810d8..3e721515 100644
--- a/plexpy/notifiers.py
+++ b/plexpy/notifiers.py
@@ -892,6 +892,15 @@ class PrettyMetadata(object):
parameters[''] = ''
return parameters
+ def get_image(self):
+ result = pmsconnect.PmsConnect().get_image(img=self.parameters.get('poster_thumb', ''))
+ if result and result[0]:
+ poster_content = result[0]
+ poster_filename = 'poster_{}.png'.format(self.parameters['rating_key'])
+ return (poster_filename, poster_content, 'image/png')
+
+ logger.error("Tautulli Notifiers :: Unable to retrieve image for notification.")
+
class Notifier(object):
NAME = ''
@@ -1117,10 +1126,16 @@ class DISCORD(Notifier):
if self.config['tts']:
data['tts'] = True
+ files = {}
+
if self.config['incl_card'] and kwargs.get('parameters', {}).get('media_type'):
# Grab formatted metadata
pretty_metadata = PrettyMetadata(kwargs['parameters'])
+ image = pretty_metadata.get_image()
+ if image:
+ files = {'files[0]': image}
+
if pretty_metadata.media_type == 'movie':
provider = self.config['movie_provider']
elif pretty_metadata.media_type in ('show', 'season', 'episode'):
@@ -1150,9 +1165,9 @@ class DISCORD(Notifier):
attachment['color'] = helpers.hex_to_int(hex)
if self.config['incl_thumbnail']:
- attachment['thumbnail'] = {'url': poster_url}
+ attachment['thumbnail'] = {'url': 'attachment://{}'.format(image[0]) if image else poster_url}
else:
- attachment['image'] = {'url': poster_url}
+ attachment['image'] = {'url': 'attachment://{}'.format(image[0]) if image else poster_url}
if self.config['incl_description']:
attachment['description'] = description[:2045] + (description[2045:] and '...')
@@ -1172,10 +1187,13 @@ class DISCORD(Notifier):
data['embeds'] = [attachment]
- headers = {'Content-type': 'application/json'}
params = {'wait': True}
- return self.make_request(self.config['hook'], params=params, headers=headers, json=data)
+ if files:
+ files['payload_json'] = (None, json.dumps(data), 'application/json')
+ return self.make_request(self.config['hook'], params=params, files=files)
+ else:
+ return self.make_request(self.config['hook'], params=params, json=data)
def _return_config_options(self):
config_option = [{'label': 'Discord Webhook URL',
@@ -1217,10 +1235,7 @@ class DISCORD(Notifier):
{'label': 'Include Rich Metadata Info',
'value': self.config['incl_card'],
'name': 'discord_incl_card',
- 'description': 'Include an info card with a poster and metadata with the notifications.
'
- 'Note:
Image Hosting '
- 'must be enabled under the 3rd Party APIs settings tab.',
+ 'description': 'Include an info card with a poster and metadata with the notifications.',
'input_type': 'checkbox'
},
{'label': 'Include Summary',
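For reference, a hedged sketch of the multipart webhook request assembled above; the webhook URL, embed fields, and image bytes are placeholders:

```python
# Discord accepts the JSON body as a 'payload_json' part when files are
# attached, and embeds can reference an uploaded file via attachment://.
import json
import requests

webhook_url = 'https://discord.com/api/webhooks/<id>/<token>'  # placeholder
image = ('poster_12345.png', b'<png bytes>', 'image/png')      # placeholder poster tuple

data = {
    'content': 'Tautulli (Play)',
    'embeds': [{
        'title': 'Example Movie',
        # Reference the uploaded file instead of a hosted poster URL.
        'image': {'url': 'attachment://{}'.format(image[0])},
    }],
}
files = {
    'files[0]': image,
    'payload_json': (None, json.dumps(data), 'application/json'),
}
requests.post(webhook_url, params={'wait': True}, files=files)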
From e263f0b8a3a40f92820a6a230f5a377eef817a5c Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Tue, 14 Feb 2023 18:21:11 -0800
Subject: [PATCH 010/113] Refactor notification image upload code
---
plexpy/notifiers.py | 57 +++++++++++----------------------------------
1 file changed, 13 insertions(+), 44 deletions(-)
diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py
index 3e721515..e3ef6862 100644
--- a/plexpy/notifiers.py
+++ b/plexpy/notifiers.py
@@ -1807,19 +1807,12 @@ class GROUPME(Notifier):
if self.config['incl_poster'] and kwargs.get('parameters'):
pretty_metadata = PrettyMetadata(kwargs.get('parameters'))
- # Retrieve the poster from Plex
- result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb',''))
- if result and result[0]:
- poster_content = result[0]
- else:
- poster_content = ''
- logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
-
- if poster_content:
+ image = pretty_metadata.get_image()
+ if image:
headers = {'X-Access-Token': self.config['access_token'],
'Content-Type': 'image/png'}
- r = requests.post('https://image.groupme.com/pictures', headers=headers, data=poster_content)
+ r = requests.post('https://image.groupme.com/pictures', headers=headers, data=image[1])
if r.status_code == 200:
logger.info("Tautulli Notifiers :: {name} poster sent.".format(name=self.NAME))
@@ -3042,18 +3035,10 @@ class PUSHBULLET(Notifier):
# Grab formatted metadata
pretty_metadata = PrettyMetadata(kwargs['parameters'])
- # Retrieve the poster from Plex
- result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb', ''))
- if result and result[0]:
- poster_content = result[0]
- else:
- poster_content = ''
- logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
-
- if poster_content:
- poster_filename = 'poster_{}.png'.format(pretty_metadata.parameters['rating_key'])
- file_json = {'file_name': poster_filename, 'file_type': 'image/png'}
- files = {'file': (poster_filename, poster_content, 'image/png')}
+ image = pretty_metadata.get_image()
+ if image:
+ file_json = {'file_name': image[0], 'file_type': image[2]}
+ files = {'file': image}
r = requests.post('https://api.pushbullet.com/v2/upload-request', headers=headers, json=file_json)
@@ -3199,17 +3184,9 @@ class PUSHOVER(Notifier):
# Grab formatted metadata
pretty_metadata = PrettyMetadata(kwargs['parameters'])
- # Retrieve the poster from Plex
- result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb', ''))
- if result and result[0]:
- poster_content = result[0]
- else:
- poster_content = ''
- logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
-
- if poster_content:
- poster_filename = 'poster_{}.png'.format(pretty_metadata.parameters['rating_key'])
- files = {'attachment': (poster_filename, poster_content, 'image/png')}
+ image = pretty_metadata.get_image()
+ if image:
+ files = {'attachment': image}
headers = {}
return self.make_request('https://api.pushover.net/1/messages.json', headers=headers, data=data, files=files)
@@ -3994,17 +3971,9 @@ class TELEGRAM(Notifier):
# Grab formatted metadata
pretty_metadata = PrettyMetadata(kwargs['parameters'])
- # Retrieve the poster from Plex
- result = pmsconnect.PmsConnect().get_image(img=pretty_metadata.parameters.get('poster_thumb', ''))
- if result and result[0]:
- poster_content = result[0]
- else:
- poster_content = ''
- logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
-
- if poster_content:
- poster_filename = 'poster_{}.png'.format(pretty_metadata.parameters['rating_key'])
- files = {'photo': (poster_filename, poster_content, 'image/png')}
+ image = pretty_metadata.get_image()
+ if image:
+ files = {'photo': image}
if len(text) > 1024:
data['disable_notification'] = True
From 0db9548995303d5669e06d5111ed074a486583dd Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 20 Jan 2023 17:05:30 -0800
Subject: [PATCH 011/113] Revert "Add provenance: false to
docker/build-push-action"
This reverts commit 548264d51a905bd80d418e0bdb34266b6da32a8c.
---
.github/workflows/publish-docker.yml | 1 -
1 file changed, 1 deletion(-)
diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml
index 85ce6266..8cb8267f 100644
--- a/.github/workflows/publish-docker.yml
+++ b/.github/workflows/publish-docker.yml
@@ -87,7 +87,6 @@ jobs:
ghcr.io/${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.tag }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- provenance: false
discord:
name: Discord Notification
From a8539b29272995e0cf6ade177923c26119838dc3 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Thu, 16 Feb 2023 11:29:18 -0800
Subject: [PATCH 012/113] Retrieve intro/credits markers for metadata details
---
plexpy/pmsconnect.py | 24 +++++++++++++++++++++++-
1 file changed, 23 insertions(+), 1 deletion(-)
diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py
index 11142873..3a7a2450 100644
--- a/plexpy/pmsconnect.py
+++ b/plexpy/pmsconnect.py
@@ -140,7 +140,7 @@ class PmsConnect(object):
Output: array
"""
- uri = '/library/metadata/' + rating_key
+ uri = '/library/metadata/' + rating_key + '?includeMarkers=1'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
@@ -745,6 +745,7 @@ class PmsConnect(object):
labels = []
collections = []
guids = []
+ markers = []
if metadata_main.getElementsByTagName('Director'):
for director in metadata_main.getElementsByTagName('Director'):
@@ -774,6 +775,16 @@ class PmsConnect(object):
for guid in metadata_main.getElementsByTagName('Guid'):
guids.append(helpers.get_xml_attr(guid, 'id'))
+ if metadata_main.getElementsByTagName('Marker'):
+ for marker in metadata_main.getElementsByTagName('Marker'):
+ markers.append({
+ 'id': helpers.get_xml_attr(marker, 'id'),
+ 'type': helpers.get_xml_attr(marker, 'type'),
+ 'start_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'startTimeOffset')),
+ 'end_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'endTimeOffset')),
+ 'final': helpers.bool_true(helpers.get_xml_attr(marker, 'final'))
+ })
+
if metadata_type == 'movie':
metadata = {'media_type': metadata_type,
'section_id': section_id,
@@ -821,6 +832,7 @@ class PmsConnect(object):
'labels': labels,
'collections': collections,
'guids': guids,
+ 'markers': markers,
'parent_guids': [],
'grandparent_guids': [],
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
@@ -880,6 +892,7 @@ class PmsConnect(object):
'labels': labels,
'collections': collections,
'guids': guids,
+ 'markers': markers,
'parent_guids': [],
'grandparent_guids': [],
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
@@ -942,6 +955,7 @@ class PmsConnect(object):
'labels': show_details.get('labels', []),
'collections': show_details.get('collections', []),
'guids': guids,
+ 'markers': markers,
'parent_guids': show_details.get('guids', []),
'grandparent_guids': [],
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'),
@@ -1021,6 +1035,7 @@ class PmsConnect(object):
'labels': show_details.get('labels', []),
'collections': show_details.get('collections', []),
'guids': guids,
+ 'markers': markers,
'parent_guids': season_details.get('guids', []),
'grandparent_guids': show_details.get('guids', []),
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
@@ -1076,6 +1091,7 @@ class PmsConnect(object):
'labels': labels,
'collections': collections,
'guids': guids,
+ 'markers': markers,
'parent_guids': [],
'grandparent_guids': [],
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
@@ -1132,6 +1148,7 @@ class PmsConnect(object):
'labels': labels,
'collections': collections,
'guids': guids,
+ 'markers': markers,
'parent_guids': artist_details.get('guids', []),
'grandparent_guids': [],
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'),
@@ -1191,6 +1208,7 @@ class PmsConnect(object):
'labels': album_details.get('labels', []),
'collections': album_details.get('collections', []),
'guids': guids,
+ 'markers': markers,
'parent_guids': album_details.get('guids', []),
'grandparent_guids': album_details.get('parent_guids', []),
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'title'),
@@ -1246,6 +1264,7 @@ class PmsConnect(object):
'labels': labels,
'collections': collections,
'guids': guids,
+ 'markers': markers,
'parent_guids': [],
'grandparent_guids': [],
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
@@ -1302,6 +1321,7 @@ class PmsConnect(object):
'labels': photo_album_details.get('labels', []),
'collections': photo_album_details.get('collections', []),
'guids': [],
+ 'markers': markers,
'parent_guids': photo_album_details.get('guids', []),
'grandparent_guids': [],
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle') or library_name,
@@ -1361,6 +1381,7 @@ class PmsConnect(object):
'labels': labels,
'collections': collections,
'guids': guids,
+ 'markers': markers,
'parent_guids': [],
'grandparent_guids': [],
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
@@ -1435,6 +1456,7 @@ class PmsConnect(object):
'labels': labels,
'collections': collections,
'guids': guids,
+ 'markers': markers,
'parent_guids': [],
'grandparent_guids': [],
'full_title': helpers.get_xml_attr(metadata_main, 'title'),
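For reference, a standalone sketch of the Marker parsing introduced above, using an inline XML snippet in place of a live Plex response; the attribute values are illustrative:

```python
# Parse intro/credits Marker elements the same way the patch does,
# but with xml.dom.minidom on a small example document.
from xml.dom import minidom

xml = (
    '<Video>'
    '<Marker id="1" type="intro" startTimeOffset="5000" endTimeOffset="95000"/>'
    '<Marker id="2" type="credits" startTimeOffset="2500000" endTimeOffset="2600000" final="1"/>'
    '</Video>'
)

video = minidom.parseString(xml).getElementsByTagName('Video')[0]
markers = []
for marker in video.getElementsByTagName('Marker'):
    markers.append({
        'id': marker.getAttribute('id'),
        'type': marker.getAttribute('type'),
        'start_time_offset': int(marker.getAttribute('startTimeOffset')),
        'end_time_offset': int(marker.getAttribute('endTimeOffset')),
        'final': marker.getAttribute('final') == '1',
    })
print(markers)
```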
From 9a152932ee8c81341123749d61de8b1987390a0d Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Thu, 16 Feb 2023 11:29:47 -0800
Subject: [PATCH 013/113] Monitor stream intro/credits marker activity
---
plexpy/__init__.py | 15 +-
plexpy/activity_handler.py | 380 ++++++++++++++++++-----------------
plexpy/activity_processor.py | 12 +-
3 files changed, 226 insertions(+), 181 deletions(-)
diff --git a/plexpy/__init__.py b/plexpy/__init__.py
index d27cd396..003259bc 100644
--- a/plexpy/__init__.py
+++ b/plexpy/__init__.py
@@ -656,7 +656,8 @@ def dbcheck():
'synced_version INTEGER, synced_version_profile TEXT, '
'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, '
'secure INTEGER, relayed INTEGER, '
- 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, '
+ 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, '
+ 'watched INTEGER DEFAULT 0, intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, '
'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, '
'rating_key_websocket TEXT)'
)
@@ -1401,6 +1402,18 @@ def dbcheck():
'ALTER TABLE sessions ADD COLUMN stream_subtitle_forced INTEGER'
)
+ # Upgrade sessions table from earlier versions
+ try:
+ c_db.execute('SELECT intro FROM sessions')
+ except sqlite3.OperationalError:
+ logger.debug(u"Altering database. Updating database table sessions.")
+ c_db.execute(
+ 'ALTER TABLE sessions ADD COLUMN intro INTEGER DEFAULT 0'
+ )
+ c_db.execute(
+ 'ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0'
+ )
+
# Upgrade session_history table from earlier versions
try:
c_db.execute('SELECT reference_id FROM session_history')
diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py
index 07d0f8e3..a89ccb98 100644
--- a/plexpy/activity_handler.py
+++ b/plexpy/activity_handler.py
@@ -51,7 +51,11 @@ RECENTLY_ADDED_QUEUE = {}
class ActivityHandler(object):
def __init__(self, timeline):
+ self.ap = activity_processor.ActivityProcessor()
self.timeline = timeline
+ self.db_session = None
+ self.session = None
+ self.metadata = None
def is_valid_session(self):
if 'sessionKey' in self.timeline:
@@ -72,15 +76,18 @@ class ActivityHandler(object):
return None
+ def get_db_session(self):
+ # Retrieve the session data from our temp table
+ self.db_session = self.ap.get_session_by_key(session_key=self.get_session_key())
+
def get_metadata(self, skip_cache=False):
- cache_key = None if skip_cache else self.get_session_key()
- pms_connect = pmsconnect.PmsConnect()
- metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key)
+ if self.metadata is None:
+ cache_key = None if skip_cache else self.get_session_key()
+ pms_connect = pmsconnect.PmsConnect()
+ metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key)
- if metadata:
- return metadata
-
- return None
+ if metadata:
+ self.metadata = metadata
def get_live_session(self, skip_cache=False):
pms_connect = pmsconnect.PmsConnect()
@@ -94,196 +101,179 @@ class ActivityHandler(object):
if not session['rating_key']:
session['rating_key'] = self.get_rating_key()
session['rating_key_websocket'] = self.get_rating_key()
+ self.session = session
return session
- return None
+ def update_db_session(self, notify=False):
+ if self.session is None:
+ self.get_live_session()
- def update_db_session(self, session=None, notify=False):
- if session is None:
- session = self.get_live_session()
-
- if session:
+ if self.session:
# Update our session temp table values
- ap = activity_processor.ActivityProcessor()
- ap.write_session(session=session, notify=notify)
+ self.ap.write_session(session=self.session, notify=notify)
self.set_session_state()
+ self.get_db_session()
def set_session_state(self):
- ap = activity_processor.ActivityProcessor()
- ap.set_session_state(session_key=self.get_session_key(),
+ self.ap.set_session_state(session_key=self.get_session_key(),
state=self.timeline['state'],
view_offset=self.timeline['viewOffset'],
stopped=helpers.timestamp())
+
+ def put_notification(self, notify_action, **kwargs):
+ notification = {'stream_data': self.db_session.copy(), 'notify_action': notify_action}
+ notification.update(kwargs)
+ plexpy.NOTIFY_QUEUE.put(notification)
def on_start(self):
- if self.is_valid_session():
- session = self.get_live_session(skip_cache=True)
+ self.get_live_session(skip_cache=True)
- if not session:
+ if not self.session:
+ return
+
+ # Some DLNA clients create a new session temporarily when browsing the library
+ # Wait and get session again to make sure it is an actual session
+ if self.session['platform'] == 'DLNA':
+ time.sleep(1)
+ self.get_live_session()
+ if not self.session:
return
- # Some DLNA clients create a new session temporarily when browsing the library
- # Wait and get session again to make sure it is an actual session
- if session['platform'] == 'DLNA':
- time.sleep(1)
- session = self.get_live_session()
- if not session:
- return
+ logger.debug("Tautulli ActivityHandler :: Session %s started by user %s (%s) with ratingKey %s (%s)%s."
+ % (str(self.session['session_key']), str(self.session['user_id']), self.session['username'],
+ str(self.session['rating_key']), self.session['full_title'], '[Live TV]' if self.session['live'] else ''))
- logger.debug("Tautulli ActivityHandler :: Session %s started by user %s (%s) with ratingKey %s (%s)%s."
- % (str(session['session_key']), str(session['user_id']), session['username'],
- str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
+ # Write the new session to our temp session table
+ self.update_db_session(notify=True)
- # Send notification after updating db
- #plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
-
- # Write the new session to our temp session table
- self.update_db_session(session=session, notify=True)
-
- # Schedule a callback to force stop a stale stream 5 minutes later
- schedule_callback('session_key-{}'.format(self.get_session_key()),
- func=force_stop_stream,
- args=[self.get_session_key(), session['full_title'], session['username']],
- minutes=5)
+ # Schedule a callback to force stop a stale stream 5 minutes later
+ schedule_callback('session_key-{}'.format(self.get_session_key()),
+ func=force_stop_stream,
+ args=[self.get_session_key(), self.session['full_title'], self.session['username']],
+ minutes=5)
+
+ self.check_markers()
def on_stop(self, force_stop=False):
- if self.is_valid_session():
- logger.debug("Tautulli ActivityHandler :: Session %s %sstopped."
- % (str(self.get_session_key()), 'force ' if force_stop else ''))
+ logger.debug("Tautulli ActivityHandler :: Session %s %sstopped."
+ % (str(self.get_session_key()), 'force ' if force_stop else ''))
- # Set the session last_paused timestamp
- ap = activity_processor.ActivityProcessor()
- ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
+ # Set the session last_paused timestamp
+ self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
- # Update the session state and viewOffset
- # Set force_stop to true to disable the state set
- if not force_stop:
- self.set_session_state()
+ # Update the session state and viewOffset
+ # Set force_stop to true to disable the state set
+ if not force_stop:
+ self.set_session_state()
- # Retrieve the session data from our temp table
- db_session = ap.get_session_by_key(session_key=self.get_session_key())
+ # Write it to the history table
+ row_id = self.ap.write_session_history(session=self.db_session)
- # Write it to the history table
- monitor_proc = activity_processor.ActivityProcessor()
- row_id = monitor_proc.write_session_history(session=db_session)
+ if row_id:
+ self.put_notification('on_stop')
- if row_id:
- plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_stop'})
+ schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True)
- schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True)
-
- # Remove the session from our temp session table
- logger.debug("Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue"
- % (str(self.get_session_key()), str(self.get_rating_key())))
- ap.delete_session(row_id=row_id)
- delete_metadata_cache(self.get_session_key())
- else:
- schedule_callback('session_key-{}'.format(self.get_session_key()),
- func=force_stop_stream,
- args=[self.get_session_key(), db_session['full_title'], db_session['user']],
- seconds=30)
+ # Remove the session from our temp session table
+ logger.debug("Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue"
+ % (str(self.get_session_key()), str(self.get_rating_key())))
+ self.ap.delete_session(row_id=row_id)
+ delete_metadata_cache(self.get_session_key())
+ else:
+ schedule_callback('session_key-{}'.format(self.get_session_key()),
+ func=force_stop_stream,
+ args=[self.get_session_key(), self.db_session['full_title'], self.db_session['user']],
+ seconds=30)
def on_pause(self, still_paused=False):
- if self.is_valid_session():
- if not still_paused:
- logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key()))
+ if not still_paused:
+ logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key()))
- # Set the session last_paused timestamp
- ap = activity_processor.ActivityProcessor()
- ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=helpers.timestamp())
+ # Set the session last_paused timestamp
+ self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=helpers.timestamp())
- # Update the session state and viewOffset
- self.update_db_session()
+ self.update_db_session()
- # Retrieve the session data from our temp table
- db_session = ap.get_session_by_key(session_key=self.get_session_key())
-
- if not still_paused:
- plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_pause'})
+ if not still_paused:
+ self.put_notification('on_pause')
def on_resume(self):
- if self.is_valid_session():
- logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.get_session_key()))
- # Set the session last_paused timestamp
- ap = activity_processor.ActivityProcessor()
- ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
+ # Set the session last_paused timestamp
+ self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
- # Update the session state and viewOffset
- self.update_db_session()
+ self.update_db_session()
- # Retrieve the session data from our temp table
- db_session = ap.get_session_by_key(session_key=self.get_session_key())
-
- plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_resume'})
-
- def on_change(self):
- if self.is_valid_session():
- logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key()))
-
- # Update the session state and viewOffset
- self.update_db_session()
-
- # Retrieve the session data from our temp table
- ap = activity_processor.ActivityProcessor()
- db_session = ap.get_session_by_key(session_key=self.get_session_key())
-
- plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_change'})
+ self.put_notification('on_resume')
def on_buffer(self):
- if self.is_valid_session():
- logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key())
- ap = activity_processor.ActivityProcessor()
- db_stream = ap.get_session_by_key(session_key=self.get_session_key())
+ logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key())
- # Increment our buffer count
- ap.increment_session_buffer_count(session_key=self.get_session_key())
+ # Increment our buffer count
+ self.ap.increment_session_buffer_count(session_key=self.get_session_key())
- # Get our current buffer count
- current_buffer_count = ap.get_session_buffer_count(self.get_session_key())
- logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." %
- (self.get_session_key(), current_buffer_count))
+ # Get our current buffer count
+ current_buffer_count = self.ap.get_session_buffer_count(self.get_session_key())
+ logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." %
+ (self.get_session_key(), current_buffer_count))
- # Get our last triggered time
- buffer_last_triggered = ap.get_session_buffer_trigger_time(self.get_session_key())
+ # Get our last triggered time
+ buffer_last_triggered = self.ap.get_session_buffer_trigger_time(self.get_session_key())
- # Update the session state and viewOffset
- self.update_db_session()
+ self.update_db_session()
- time_since_last_trigger = 0
- if buffer_last_triggered:
- logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." %
- (self.get_session_key(), buffer_last_triggered))
- time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered)
+ time_since_last_trigger = 0
+ if buffer_last_triggered:
+ logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." %
+ (self.get_session_key(), buffer_last_triggered))
+ time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered)
- if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
- time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
- ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
+ if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
+ time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
+ self.ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
- # Retrieve the session data from our temp table
- db_session = ap.get_session_by_key(session_key=self.get_session_key())
-
- plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_buffer'})
+ self.put_notification('on_buffer')
def on_error(self):
- if self.is_valid_session():
- logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.get_session_key()))
- # Update the session state and viewOffset
- self.update_db_session()
+ self.update_db_session()
- # Retrieve the session data from our temp table
- ap = activity_processor.ActivityProcessor()
- db_session = ap.get_session_by_key(session_key=self.get_session_key())
+ self.put_notification('on_error')
- plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_error'})
+ def on_change(self):
+ logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key()))
+
+ self.update_db_session()
+
+ self.put_notification('on_change')
+
+ def on_intro(self):
+ if self.get_live_session():
+ logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.get_session_key()))
+
+ self.put_notification('on_intro')
+
+ def on_credits(self):
+ if self.get_live_session():
+ logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.get_session_key()))
+ self.put_notification('on_credits')
+
+ def on_watched(self):
+ logger.debug("Tautulli ActivityHandler :: Session %s watched." % str(self.get_session_key()))
+
+ watched_notifiers = notification_handler.get_notify_state_enabled(
+ session=self.db_session, notify_action='on_watched', notified=False)
+
+ for d in watched_notifiers:
+ self.put_notification('on_watched', notifier_id=d['notifier_id'])
# This function receives events from our websocket connection
def process(self):
if self.is_valid_session():
- ap = activity_processor.ActivityProcessor()
- db_session = ap.get_session_by_key(session_key=self.get_session_key())
+ self.get_db_session()
this_state = self.timeline['state']
this_rating_key = str(self.timeline['ratingKey'])
@@ -294,27 +284,27 @@ class ActivityHandler(object):
this_live_uuid = this_key.split('/')[-1] if this_key.startswith('/livetv/sessions') else None
# If we already have this session in the temp table, check for state changes
- if db_session:
+ if self.db_session:
# Re-schedule the callback to reset the 5 minutes timer
schedule_callback('session_key-{}'.format(self.get_session_key()),
func=force_stop_stream,
- args=[self.get_session_key(), db_session['full_title'], db_session['user']],
+ args=[self.get_session_key(), self.db_session['full_title'], self.db_session['user']],
minutes=5)
- last_state = db_session['state']
- last_rating_key = str(db_session['rating_key'])
- last_live_uuid = db_session['live_uuid']
- last_transcode_key = db_session['transcode_key'].split('/')[-1]
- last_paused = db_session['last_paused']
- last_rating_key_websocket = db_session['rating_key_websocket']
- last_guid = db_session['guid']
+ last_state = self.db_session['state']
+ last_rating_key = str(self.db_session['rating_key'])
+ last_live_uuid = self.db_session['live_uuid']
+ last_transcode_key = self.db_session['transcode_key'].split('/')[-1]
+ last_paused = self.db_session['last_paused']
+ last_rating_key_websocket = self.db_session['rating_key_websocket']
+ last_guid = self.db_session['guid']
this_guid = last_guid
# Check guid for live TV metadata every 60 seconds
- if db_session['live'] and helpers.timestamp() - db_session['stopped'] > 60:
- metadata = self.get_metadata(skip_cache=True)
- if metadata:
- this_guid = metadata['guid']
+ if self.db_session['live'] and helpers.timestamp() - self.db_session['stopped'] > 60:
+ self.get_metadata(skip_cache=True)
+ if self.metadata:
+ this_guid = self.metadata['guid']
# Make sure the same item is being played
if (this_rating_key == last_rating_key
@@ -325,7 +315,7 @@ class ActivityHandler(object):
if this_state == 'playing':
# Update the session in our temp session table
# if the last set temporary stopped time exceeds 60 seconds
- if helpers.timestamp() - db_session['stopped'] > 60:
+ if helpers.timestamp() - self.db_session['stopped'] > 60:
self.update_db_session()
# Start our state checks
@@ -356,33 +346,65 @@ class ActivityHandler(object):
self.on_stop(force_stop=True)
self.on_start()
- # Monitor if the stream has reached the watch percentage for notifications
- # The only purpose of this is for notifications
- if not db_session['watched'] and this_state != 'buffering':
- progress_percent = helpers.get_percent(self.timeline['viewOffset'], db_session['duration'])
- watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
- 'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
- 'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
- 'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
- }
-
- if progress_percent >= watched_percent.get(db_session['media_type'], 101):
- logger.debug("Tautulli ActivityHandler :: Session %s watched."
- % str(self.get_session_key()))
- ap.set_watched(session_key=self.get_session_key())
-
- watched_notifiers = notification_handler.get_notify_state_enabled(
- session=db_session, notify_action='on_watched', notified=False)
-
- for d in watched_notifiers:
- plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(),
- 'notifier_id': d['notifier_id'],
- 'notify_action': 'on_watched'})
+ # Check for stream offset notifications
+ self.check_markers()
+ self.check_watched()
else:
# We don't have this session in our table yet, start a new one.
if this_state != 'buffering':
self.on_start()
+
+ def check_markers(self):
+ # Monitor if the stream has reached the intro or credit marker offsets
+ self.get_metadata()
+
+ intro_markers, credits_markers = [], []
+ for marker in self.metadata['markers']:
+ if marker['type'] == 'intro':
+ intro_markers.append(marker)
+ elif marker['type'] == 'credits':
+ credits_markers.append(marker)
+
+ self._check_marker('intro', intro_markers)
+ self._check_marker('credits', credits_markers)
+
+ def _check_marker(self, marker_type, markers):
+ if self.db_session[marker_type] < len(markers):
+ marker = markers[self.db_session[marker_type]]
+
+ # Websocket events only fire every 10 seconds
+ # Check if the marker is within 10 seconds of the current viewOffset
+ if marker['start_time_offset'] - 10000 <= self.timeline['viewOffset'] <= marker['end_time_offset']:
+ set_func = getattr(self.ap, 'set_{}'.format(marker_type))
+ callback_func = getattr(self, 'on_{}'.format(marker_type))
+
+ set_func(session_key=self.get_session_key())
+
+ if self.timeline['viewOffset'] < marker['start_time_offset']:
+ # Schedule a callback for the exact offset of the marker
+ schedule_callback(
+ 'session_key-{}-{}-{}'.format(self.get_session_key(), marker_type, self.db_session[marker_type]),
+ func=callback_func,
+ milliseconds=marker['start_time_offset'] - self.timeline['viewOffset']
+ )
+ else:
+ callback_func()
+
+ def check_watched(self):
+ # Monitor if the stream has reached the watch percentage for notifications
+ if not self.db_session['watched'] and self.timeline['state'] != 'buffering':
+ progress_percent = helpers.get_percent(self.timeline['viewOffset'], self.db_session['duration'])
+ watched_percent = {
+ 'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
+ 'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
+ 'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
+ 'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
+ }
+
+ if progress_percent >= watched_percent.get(self.db_session['media_type'], 101):
+ self.ap.set_watched(session_key=self.get_session_key())
+ self.on_watched()
class TimelineHandler(object):
diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py
index 4608e4c8..e110ea64 100644
--- a/plexpy/activity_processor.py
+++ b/plexpy/activity_processor.py
@@ -660,8 +660,18 @@ class ActivityProcessor(object):
self.db.action('UPDATE sessions SET write_attempts = ? WHERE session_key = ?',
[session['write_attempts'] + 1, session_key])
+ def set_intro(self, session_key=None):
+ self.db.action('UPDATE sessions SET intro = intro + 1 '
+ 'WHERE session_key = ?',
+ [session_key])
+
+ def set_credits(self, session_key=None):
+ self.db.action('UPDATE sessions SET credits = credits + 1 '
+ 'WHERE session_key = ?',
+ [session_key])
+
def set_watched(self, session_key=None):
- self.db.action('UPDATE sessions SET watched = ?'
+ self.db.action('UPDATE sessions SET watched = ? '
'WHERE session_key = ?',
[1, session_key])
From 71bc0631559e6630610afd570afc2ec7551c12bc Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Thu, 16 Feb 2023 11:31:35 -0800
Subject: [PATCH 014/113] Add notification triggers for intro/credit markers
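Schema notes: the new on_intro / on_credits flag, subject, and body columns are added with the same probe-then-ALTER guard that dbcheck() already uses for older upgrades. A minimal standalone sketch of that pattern (the add_column_if_missing helper and the in-memory database are illustrative, not part of this change):

    import sqlite3

    def add_column_if_missing(conn, table, column, column_def):
        # SQLite raises OperationalError when selecting a missing column,
        # which is the signal that the ALTER TABLE upgrade still needs to run.
        try:
            conn.execute('SELECT {} FROM {}'.format(column, table))
        except sqlite3.OperationalError:
            conn.execute('ALTER TABLE {} ADD COLUMN {} {}'.format(table, column, column_def))

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE notifiers (id INTEGER PRIMARY KEY)')
    for col, col_def in [('on_intro', 'INTEGER DEFAULT 0'), ('on_intro_subject', 'TEXT'),
                         ('on_intro_body', 'TEXT'), ('on_credits', 'INTEGER DEFAULT 0'),
                         ('on_credits_subject', 'TEXT'), ('on_credits_body', 'TEXT')]:
        add_column_if_missing(conn, 'notifiers', col, col_def)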
---
plexpy/__init__.py | 29 ++++++++++++++++++++++++++++-
plexpy/common.py | 8 ++++++++
plexpy/notification_handler.py | 24 ++++++++++++++++++++++++
plexpy/notifiers.py | 16 ++++++++++++++++
4 files changed, 76 insertions(+), 1 deletion(-)
diff --git a/plexpy/__init__.py b/plexpy/__init__.py
index 003259bc..873b46f0 100644
--- a/plexpy/__init__.py
+++ b/plexpy/__init__.py
@@ -754,19 +754,22 @@ def dbcheck():
'agent_id INTEGER, agent_name TEXT, agent_label TEXT, friendly_name TEXT, notifier_config TEXT, '
'on_play INTEGER DEFAULT 0, on_stop INTEGER DEFAULT 0, on_pause INTEGER DEFAULT 0, '
'on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, '
- 'on_error INTEGER DEFAULT 0, on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, '
+ 'on_error INTEGER DEFAULT 0, on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, '
+ 'on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, '
'on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, '
'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, '
'on_concurrent INTEGER DEFAULT 0, on_newdevice INTEGER DEFAULT 0, on_plexpyupdate INTEGER DEFAULT 0, '
'on_plexpydbcorrupt INTEGER DEFAULT 0, '
'on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, '
'on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_error_subject TEXT, '
+ 'on_intro_subject TEXT, on_credits_subject TEXT, '
'on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, '
'on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, '
'on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, '
'on_plexpydbcorrupt_subject TEXT, '
'on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, '
'on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, '
+ 'on_intro_body TEXT, on_credits_body TEXT, '
'on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, '
'on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, '
'on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, '
@@ -2384,6 +2387,30 @@ def dbcheck():
'ALTER TABLE notifiers ADD COLUMN on_error_body TEXT'
)
+ # Upgrade notifiers table from earlier versions
+ try:
+ c_db.execute('SELECT on_intro FROM notifiers')
+ except sqlite3.OperationalError:
+ logger.debug("Altering database. Updating database table notifiers.")
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_intro INTEGER DEFAULT 0'
+ )
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_intro_subject TEXT'
+ )
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_intro_body TEXT'
+ )
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_credits INTEGER DEFAULT 0'
+ )
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_credits_subject TEXT'
+ )
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_credits_body TEXT'
+ )
+
# Upgrade tvmaze_lookup table from earlier versions
try:
c_db.execute('SELECT rating_key FROM tvmaze_lookup')
diff --git a/plexpy/common.py b/plexpy/common.py
index ad91ee87..65de4810 100644
--- a/plexpy/common.py
+++ b/plexpy/common.py
@@ -416,6 +416,7 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Progress Duration (sec)', 'type': 'int', 'value': 'progress_duration_sec', 'description': 'The last reported offset (in seconds) of the stream.'},
{'name': 'Progress Time', 'type': 'str', 'value': 'progress_time', 'description': 'The last reported offset (in time format) of the stream.'},
{'name': 'Progress Percent', 'type': 'int', 'value': 'progress_percent', 'description': 'The last reported progress percent of the stream.'},
+ {'name': 'View Offset (ms)', 'type': 'int', 'value': 'view_offset', 'description': 'The current view offset (in milliseconds) for the stream.'},
{'name': 'Transcode Decision', 'type': 'str', 'value': 'transcode_decision', 'description': 'The transcode decision of the stream.'},
{'name': 'Container Decision', 'type': 'str', 'value': 'container_decision', 'description': 'The container transcode decision of the stream.'},
{'name': 'Video Decision', 'type': 'str', 'value': 'video_decision', 'description': 'The video transcode decision of the stream.'},
@@ -426,6 +427,12 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Optimized Version Profile', 'type': 'str', 'value': 'optimized_version_profile', 'description': 'The optimized version profile of the stream.'},
{'name': 'Synced Version', 'type': 'int', 'value': 'synced_version', 'description': 'If the stream is an synced version.', 'example': '0 or 1'},
{'name': 'Live', 'type': 'int', 'value': 'live', 'description': 'If the stream is live TV.', 'example': '0 or 1'},
+ {'name': 'Intro Marker Start Time', 'type': 'int', 'value': 'intro_marker_start', 'description': 'The intro marker start time offset in milliseconds.'},
+ {'name': 'Intro Marker End Time', 'type': 'int', 'value': 'intro_marker_end', 'description': 'The intro marker end time offset in milliseconds.'},
+ {'name': 'Credits Marker First', 'type': 'int', 'value': 'credits_marker_first', 'description': 'If the credits marker is the first marker.', 'example': '0 or 1'},
+ {'name': 'Credits Marker Final', 'type': 'int', 'value': 'credits_marker_final', 'description': 'If the credits marker is the final marker.', 'example': '0 or 1'},
+ {'name': 'Credits Marker Start Time', 'type': 'int', 'value': 'credits_marker_start', 'description': 'The credits marker start time offset in milliseconds.'},
+ {'name': 'Credits Marker End Time', 'type': 'int', 'value': 'credits_marker_end', 'description': 'The credits marker end time offset in milliseconds.'},
{'name': 'Channel Call Sign', 'type': 'str', 'value': 'channel_call_sign', 'description': 'The Live TV channel call sign.'},
{'name': 'Channel Identifier', 'type': 'str', 'value': 'channel_identifier', 'description': 'The Live TV channel number.'},
{'name': 'Channel Thumb', 'type': 'str', 'value': 'channel_thumb', 'description': 'The URL for the Live TV channel logo.'},
@@ -540,6 +547,7 @@ NOTIFICATION_PARAMETERS = [
{'name': 'User Rating', 'type': 'float', 'value': 'user_rating', 'description': 'The user (star) rating (out of 10) for the item.'},
{'name': 'Duration', 'type': 'int', 'value': 'duration', 'description': 'The duration (in minutes) for the item.'},
{'name': 'Duration (sec)', 'type': 'int', 'value': 'duration_sec', 'description': 'The duration (in seconds) for the item.'},
+ {'name': 'Duration (ms)', 'type': 'int', 'value': 'duration_ms', 'description': 'The duration (in milliseconds) for the item.'},
{'name': 'Poster URL', 'type': 'str', 'value': 'poster_url', 'description': 'A URL for the movie, TV show, or album poster.'},
{'name': 'Plex ID', 'type': 'str', 'value': 'plex_id', 'description': 'The Plex ID for the item.', 'example': 'e.g. 5d7769a9594b2b001e6a6b7e'},
{'name': 'Plex URL', 'type': 'str', 'value': 'plex_url', 'description': 'The Plex URL to your server for the item.'},
diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py
index 6b55c5b7..b9940b70 100644
--- a/plexpy/notification_handler.py
+++ b/plexpy/notification_handler.py
@@ -583,6 +583,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
notify_params.update(media_info)
notify_params.update(media_part_info)
+ metadata = pmsconnect.PmsConnect().get_metadata_details(rating_key=rating_key)
+
child_metadata = grandchild_metadata = []
for key in kwargs.pop('child_keys', []):
child = pmsconnect.PmsConnect().get_metadata_details(rating_key=key)
@@ -938,6 +940,20 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
and audience_rating:
audience_rating = helpers.get_percent(notify_params['audience_rating'], 10)
+ intro_markers, credits_markers = [], []
+ for marker in metadata['markers']:
+ if marker['type'] == 'intro':
+ intro_markers.append(marker)
+ elif marker['type'] == 'credits':
+ credits_markers.append(marker)
+
+ intro_marker = defaultdict(int)
+ credits_marker = defaultdict(int)
+ if notify_action == 'on_intro' and intro_markers and notify_params['intro'] < len(intro_markers):
+ intro_marker = intro_markers[notify_params['intro']]
+ if notify_action == 'on_credits' and credits_markers and notify_params['credits'] < len(credits_markers):
+ credits_marker = credits_markers[notify_params['credits']]
+
now = arrow.now()
now_iso = now.isocalendar()
@@ -1005,6 +1021,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'progress_duration_sec': view_offset_sec,
'progress_time': arrow.get(view_offset_sec).format(duration_format),
'progress_percent': helpers.get_percent(view_offset_sec, duration_sec),
+ 'view_offset': session.get('view_offset', 0),
'initial_stream': notify_params['initial_stream'],
'transcode_decision': transcode_decision,
'container_decision': notify_params['container_decision'],
@@ -1016,6 +1033,12 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'optimized_version_profile': notify_params['optimized_version_profile'],
'synced_version': notify_params['synced_version'],
'live': notify_params['live'],
+ 'intro_marker_start': intro_marker['start_time_offset'],
+ 'intro_marker_end': intro_marker['end_time_offset'],
+ 'credits_marker_first': int(bool(credits_marker and notify_params['credits'] == 0)),
+ 'credits_marker_final': int(credits_marker['final']),
+ 'credits_marker_start': credits_marker['start_time_offset'],
+ 'credits_marker_end': credits_marker['end_time_offset'],
'channel_call_sign': notify_params['channel_call_sign'],
'channel_identifier': notify_params['channel_identifier'],
'channel_thumb': notify_params['channel_thumb'],
@@ -1132,6 +1155,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'user_rating': notify_params['user_rating'],
'duration': duration,
'duration_sec': duration_sec,
+ 'duration_ms': notify_params['duration'],
'poster_title': notify_params['poster_title'],
'poster_url': notify_params['poster_url'],
'plex_id': notify_params['plex_id'],
diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py
index e3ef6862..383f4908 100644
--- a/plexpy/notifiers.py
+++ b/plexpy/notifiers.py
@@ -340,6 +340,22 @@ def available_notification_actions(agent_id=None):
'icon': 'fa-exchange-alt',
'media_types': ('movie', 'episode', 'track')
},
+ {'label': 'Intro Marker',
+ 'name': 'on_intro',
+ 'description': 'Trigger a notification when a video stream reaches any intro marker.',
+ 'subject': 'Tautulli ({server_name})',
+ 'body': '{user} ({player}) has reached an intro marker for {title}.',
+ 'icon': 'fa-bookmark',
+ 'media_types': ('episode',)
+ },
+ {'label': 'Credits Marker',
+ 'name': 'on_credits',
+ 'description': 'Trigger a notification when a video stream reaches any credits marker.',
+ 'subject': 'Tautulli ({server_name})',
+ 'body': '{user} ({player}) has reached a credits marker for {title}.',
+ 'icon': 'fa-bookmark',
+ 'media_types': ('movie', 'episode')
+ },
{'label': 'Watched',
'name': 'on_watched',
'description': 'Trigger a notification when a video stream reaches the specified watch percentage.',
From 97af214ac1bcff2de971a3c2bdeb9ff1b4a4edb1 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Thu, 16 Feb 2023 16:19:29 -0800
Subject: [PATCH 015/113] Handle seeking through intro/credits markers
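Instead of the old per-type intro/credits counters, the session now stores the index of the marker window that currently contains the viewOffset, so seeking backwards or forwards re-evaluates which marker (if any) is active. A rough standalone illustration of that selection logic (find_active_marker is a made-up helper, not code from this patch):

    def find_active_marker(markers, view_offset, slack_ms=10000):
        # Websocket timeline events only arrive roughly every 10 seconds,
        # so allow slack_ms of lead time before the marker start offset.
        for idx, marker in enumerate(markers, start=1):
            if marker['start_time_offset'] - slack_ms <= view_offset <= marker['end_time_offset']:
                return idx, marker
        return 0, None  # no marker active; the stored index resets to 0

    markers = [
        {'type': 'intro', 'start_time_offset': 5000, 'end_time_offset': 35000},
        {'type': 'credits', 'start_time_offset': 2900000, 'end_time_offset': 2990000},
    ]
    print(find_active_marker(markers, 30000))    # (1, intro marker)
    print(find_active_marker(markers, 1500000))  # (0, None) after seeking away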
---
plexpy/__init__.py | 11 +++++++-
plexpy/activity_handler.py | 54 +++++++++++++++++-------------------
plexpy/activity_processor.py | 17 ++++++------
3 files changed, 44 insertions(+), 38 deletions(-)
diff --git a/plexpy/__init__.py b/plexpy/__init__.py
index 873b46f0..09c16586 100644
--- a/plexpy/__init__.py
+++ b/plexpy/__init__.py
@@ -657,7 +657,7 @@ def dbcheck():
'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, '
'secure INTEGER, relayed INTEGER, '
'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, '
- 'watched INTEGER DEFAULT 0, intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, '
+ 'watched INTEGER DEFAULT 0, intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, '
'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, '
'rating_key_websocket TEXT)'
)
@@ -1417,6 +1417,15 @@ def dbcheck():
'ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0'
)
+ # Upgrade sessions table from earlier versions
+ try:
+ c_db.execute('SELECT marker FROM sessions')
+ except sqlite3.OperationalError:
+ logger.debug(u"Altering database. Updating database table sessions.")
+ c_db.execute(
+ 'ALTER TABLE sessions ADD COLUMN marker INTEGER DEFAULT 0'
+ )
+
# Upgrade session_history table from earlier versions
try:
c_db.execute('SELECT reference_id FROM session_history')
diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py
index a89ccb98..3b5a283f 100644
--- a/plexpy/activity_handler.py
+++ b/plexpy/activity_handler.py
@@ -250,16 +250,16 @@ class ActivityHandler(object):
self.put_notification('on_change')
- def on_intro(self):
+ def on_intro(self, marker):
if self.get_live_session():
logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.get_session_key()))
- self.put_notification('on_intro')
+ self.put_notification('on_intro', marker=marker)
- def on_credits(self):
+ def on_credits(self, marker):
if self.get_live_session():
logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.get_session_key()))
- self.put_notification('on_credits')
+ self.put_notification('on_credits', marker=marker)
def on_watched(self):
logger.debug("Tautulli ActivityHandler :: Session %s watched." % str(self.get_session_key()))
@@ -359,37 +359,33 @@ class ActivityHandler(object):
# Monitor if the stream has reached the intro or credit marker offsets
self.get_metadata()
- intro_markers, credits_markers = [], []
- for marker in self.metadata['markers']:
- if marker['type'] == 'intro':
- intro_markers.append(marker)
- elif marker['type'] == 'credits':
- credits_markers.append(marker)
-
- self._check_marker('intro', intro_markers)
- self._check_marker('credits', credits_markers)
-
- def _check_marker(self, marker_type, markers):
- if self.db_session[marker_type] < len(markers):
- marker = markers[self.db_session[marker_type]]
+ marker_flag = False
+ for marker_idx, marker in enumerate(self.metadata['markers'], start=1):
# Websocket events only fire every 10 seconds
# Check if the marker is within 10 seconds of the current viewOffset
if marker['start_time_offset'] - 10000 <= self.timeline['viewOffset'] <= marker['end_time_offset']:
- set_func = getattr(self.ap, 'set_{}'.format(marker_type))
- callback_func = getattr(self, 'on_{}'.format(marker_type))
+ marker_flag = True
- set_func(session_key=self.get_session_key())
+ if self.db_session['marker'] != marker_idx:
+ self.ap.set_marker(session_key=self.get_session_key(), marker_idx=marker_idx, marker_type=marker['type'])
+ callback_func = getattr(self, 'on_{}'.format(marker['type']))
- if self.timeline['viewOffset'] < marker['start_time_offset']:
- # Schedule a callback for the exact offset of the marker
- schedule_callback(
- 'session_key-{}-{}-{}'.format(self.get_session_key(), marker_type, self.db_session[marker_type]),
- func=callback_func,
- milliseconds=marker['start_time_offset'] - self.timeline['viewOffset']
- )
- else:
- callback_func()
+ if self.timeline['viewOffset'] < marker['start_time_offset']:
+ # Schedule a callback for the exact offset of the marker
+ schedule_callback(
+ 'session_key-{}-marker-{}'.format(self.get_session_key(), marker_idx),
+ func=callback_func,
+ args=[marker],
+ milliseconds=marker['start_time_offset'] - self.timeline['viewOffset']
+ )
+ else:
+ callback_func(marker)
+
+ break
+
+ if not marker_flag:
+ self.ap.set_marker(session_key=self.get_session_key(), marker_idx=0)
def check_watched(self):
# Monitor if the stream has reached the watch percentage for notifications
diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py
index e110ea64..c821d23d 100644
--- a/plexpy/activity_processor.py
+++ b/plexpy/activity_processor.py
@@ -660,15 +660,16 @@ class ActivityProcessor(object):
self.db.action('UPDATE sessions SET write_attempts = ? WHERE session_key = ?',
[session['write_attempts'] + 1, session_key])
- def set_intro(self, session_key=None):
- self.db.action('UPDATE sessions SET intro = intro + 1 '
+ def set_marker(self, session_key=None, marker_idx=None, marker_type=None):
+ if marker_type == 'intro':
+ args = [1, 0]
+ elif marker_type == 'credits':
+ args = [0, 1]
+ else:
+ args = [0, 0]
+ self.db.action('UPDATE sessions SET intro = ?, credits = ?, marker = ? '
'WHERE session_key = ?',
- [session_key])
-
- def set_credits(self, session_key=None):
- self.db.action('UPDATE sessions SET credits = credits + 1 '
- 'WHERE session_key = ?',
- [session_key])
+ args + [marker_idx, session_key])
def set_watched(self, session_key=None):
self.db.action('UPDATE sessions SET watched = ? '
From 9be3bbbf0f85eb33a7b6d09d17af52355cbcdadb Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Thu, 16 Feb 2023 16:20:25 -0800
Subject: [PATCH 016/113] Update marker notification parameters
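The matched marker dict now travels with the notification kwargs, and build_media_notify_params() falls back to a defaultdict(int) so the marker_* parameters simply render as 0 for actions that carry no marker. A small sketch of that fallback behaviour (marker_params is illustrative only):

    from collections import defaultdict

    def marker_params(**kwargs):
        # Missing keys on a defaultdict(int) read as 0, so one code path
        # serves both marker and non-marker notify actions.
        marker = kwargs.pop('marker', defaultdict(int))
        return {
            'marker_start': marker['start_time_offset'],
            'marker_end': marker['end_time_offset'],
            'credits_marker_first': int(marker['first']),
            'credits_marker_final': int(marker['final']),
        }

    print(marker_params(marker={'start_time_offset': 2923863, 'end_time_offset': 2998197,
                                'first': True, 'final': True}))
    print(marker_params())  # every marker field falls back to 0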
---
plexpy/common.py | 10 ++++------
plexpy/notification_handler.py | 24 +++++-------------------
plexpy/pmsconnect.py | 10 +++++++++-
3 files changed, 18 insertions(+), 26 deletions(-)
diff --git a/plexpy/common.py b/plexpy/common.py
index 65de4810..cf1180dc 100644
--- a/plexpy/common.py
+++ b/plexpy/common.py
@@ -427,12 +427,10 @@ NOTIFICATION_PARAMETERS = [
{'name': 'Optimized Version Profile', 'type': 'str', 'value': 'optimized_version_profile', 'description': 'The optimized version profile of the stream.'},
{'name': 'Synced Version', 'type': 'int', 'value': 'synced_version', 'description': 'If the stream is an synced version.', 'example': '0 or 1'},
{'name': 'Live', 'type': 'int', 'value': 'live', 'description': 'If the stream is live TV.', 'example': '0 or 1'},
- {'name': 'Intro Marker Start Time', 'type': 'int', 'value': 'intro_marker_start', 'description': 'The intro marker start time offset in milliseconds.'},
- {'name': 'Intro Marker End Time', 'type': 'int', 'value': 'intro_marker_end', 'description': 'The intro marker end time offset in milliseconds.'},
- {'name': 'Credits Marker First', 'type': 'int', 'value': 'credits_marker_first', 'description': 'If the credits marker is the first marker.', 'example': '0 or 1'},
- {'name': 'Credits Marker Final', 'type': 'int', 'value': 'credits_marker_final', 'description': 'If the credits marker is the final marker.', 'example': '0 or 1'},
- {'name': 'Credits Marker Start Time', 'type': 'int', 'value': 'credits_marker_start', 'description': 'The credits marker start time offset in milliseconds.'},
- {'name': 'Credits Marker End Time', 'type': 'int', 'value': 'credits_marker_end', 'description': 'The credits marker end time offset in milliseconds.'},
+ {'name': 'Marker Start Time', 'type': 'int', 'value': 'marker_start', 'description': 'The intro or credits marker start time offset in milliseconds.'},
+ {'name': 'Marker End Time', 'type': 'int', 'value': 'marker_end', 'description': 'The intro or credits marker end time offset in milliseconds.'},
+ {'name': 'Credits Marker First', 'type': 'int', 'value': 'credits_marker_first', 'description': 'If the marker is the first credits marker.', 'example': '0 or 1'},
+ {'name': 'Credits Marker Final', 'type': 'int', 'value': 'credits_marker_final', 'description': 'If the marker is the final credits marker.', 'example': '0 or 1'},
{'name': 'Channel Call Sign', 'type': 'str', 'value': 'channel_call_sign', 'description': 'The Live TV channel call sign.'},
{'name': 'Channel Identifier', 'type': 'str', 'value': 'channel_identifier', 'description': 'The Live TV channel number.'},
{'name': 'Channel Thumb', 'type': 'str', 'value': 'channel_thumb', 'description': 'The URL for the Live TV channel logo.'},
diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py
index b9940b70..c92415e5 100644
--- a/plexpy/notification_handler.py
+++ b/plexpy/notification_handler.py
@@ -940,19 +940,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
and audience_rating:
audience_rating = helpers.get_percent(notify_params['audience_rating'], 10)
- intro_markers, credits_markers = [], []
- for marker in metadata['markers']:
- if marker['type'] == 'intro':
- intro_markers.append(marker)
- elif marker['type'] == 'credits':
- credits_markers.append(marker)
-
- intro_marker = defaultdict(int)
- credits_marker = defaultdict(int)
- if notify_action == 'on_intro' and intro_markers and notify_params['intro'] < len(intro_markers):
- intro_marker = intro_markers[notify_params['intro']]
- if notify_action == 'on_credits' and credits_markers and notify_params['credits'] < len(credits_markers):
- credits_marker = credits_markers[notify_params['credits']]
+ marker = kwargs.pop('marker', defaultdict(int))
now = arrow.now()
now_iso = now.isocalendar()
@@ -1033,12 +1021,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'optimized_version_profile': notify_params['optimized_version_profile'],
'synced_version': notify_params['synced_version'],
'live': notify_params['live'],
- 'intro_marker_start': intro_marker['start_time_offset'],
- 'intro_marker_end': intro_marker['end_time_offset'],
- 'credits_marker_first': int(bool(credits_marker and notify_params['credits'] == 0)),
- 'credits_marker_final': int(credits_marker['final']),
- 'credits_marker_start': credits_marker['start_time_offset'],
- 'credits_marker_end': credits_marker['end_time_offset'],
+ 'marker_start': marker['start_time_offset'],
+ 'marker_end': marker['end_time_offset'],
+ 'credits_marker_first': int(marker['first']),
+ 'credits_marker_final': int(marker['final']),
'channel_call_sign': notify_params['channel_call_sign'],
'channel_identifier': notify_params['channel_identifier'],
'channel_thumb': notify_params['channel_thumb'],
diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py
index 3a7a2450..cc433027 100644
--- a/plexpy/pmsconnect.py
+++ b/plexpy/pmsconnect.py
@@ -776,12 +776,20 @@ class PmsConnect(object):
guids.append(helpers.get_xml_attr(guid, 'id'))
if metadata_main.getElementsByTagName('Marker'):
+ first = None
for marker in metadata_main.getElementsByTagName('Marker'):
+ marker_type = helpers.get_xml_attr(marker, 'type')
+ if marker_type == 'credits':
+ if first is None:
+ first = True
+ elif first is True:
+ first = False
markers.append({
- 'id': helpers.get_xml_attr(marker, 'id'),
+ 'id': helpers.cast_to_int(helpers.get_xml_attr(marker, 'id')),
'type': helpers.get_xml_attr(marker, 'type'),
'start_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'startTimeOffset')),
'end_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'endTimeOffset')),
+ 'first': first if marker_type == 'credits' else False,
'final': helpers.bool_true(helpers.get_xml_attr(marker, 'final'))
})
From 599c54c9e10ce7293319f172fab4def78702a86b Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Thu, 16 Feb 2023 17:02:30 -0800
Subject: [PATCH 017/113] Refactor activity handler
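The bulk of this change converts the is_valid_session()/get_session_key()/get_rating_key() getters into attributes computed once in __init__ and flattens process() with early returns. A toy handler showing the shape of that pattern (not Tautulli's actual class):

    class WebsocketEventHandler(object):
        def __init__(self, timeline):
            self.timeline = timeline
            self.session_key = None
            self.rating_key = None

            # Parse the payload once instead of re-checking it in every getter.
            self.is_valid_session = ('sessionKey' in timeline
                                     and str(timeline['sessionKey']).isdigit())
            if self.is_valid_session:
                self.session_key = int(timeline['sessionKey'])
                self.rating_key = str(timeline['ratingKey'])

            self.state = timeline.get('state')
            self.view_offset = timeline.get('viewOffset')

        def process(self):
            if not self.is_valid_session:
                return  # early return keeps the main path unindented
            print('session %s is %s at %s ms'
                  % (self.session_key, self.state, self.view_offset))

    WebsocketEventHandler({'sessionKey': '42', 'ratingKey': 1234,
                           'state': 'playing', 'viewOffset': 60000}).process()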
---
plexpy/activity_handler.py | 482 ++++++++++++++++++-------------------
1 file changed, 229 insertions(+), 253 deletions(-)
diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py
index 3b5a283f..5ed69e10 100644
--- a/plexpy/activity_handler.py
+++ b/plexpy/activity_handler.py
@@ -53,38 +53,33 @@ class ActivityHandler(object):
def __init__(self, timeline):
self.ap = activity_processor.ActivityProcessor()
self.timeline = timeline
+
+ self.session_key = None
+ self.rating_key = None
+
+ self.is_valid_session = ('sessionKey' in self.timeline and str(self.timeline['sessionKey']).isdigit())
+ if self.is_valid_session:
+ self.session_key = int(self.timeline['sessionKey'])
+ self.rating_key = str(self.timeline['ratingKey'])
+
+ self.key = self.timeline.get('key')
+ self.state = self.timeline.get('state')
+ self.view_offset = self.timeline.get('viewOffset')
+ self.transcode_key = self.timeline.get('transcodeSession', '')
+
self.db_session = None
self.session = None
self.metadata = None
- def is_valid_session(self):
- if 'sessionKey' in self.timeline:
- if str(self.timeline['sessionKey']).isdigit():
- return True
-
- return False
-
- def get_session_key(self):
- if self.is_valid_session():
- return int(self.timeline['sessionKey'])
-
- return None
-
- def get_rating_key(self):
- if self.is_valid_session():
- return self.timeline['ratingKey']
-
- return None
-
def get_db_session(self):
# Retrieve the session data from our temp table
- self.db_session = self.ap.get_session_by_key(session_key=self.get_session_key())
+ self.db_session = self.ap.get_session_by_key(session_key=self.session_key)
def get_metadata(self, skip_cache=False):
if self.metadata is None:
- cache_key = None if skip_cache else self.get_session_key()
+ cache_key = None if skip_cache else self.session_key
pms_connect = pmsconnect.PmsConnect()
- metadata = pms_connect.get_metadata_details(rating_key=self.get_rating_key(), cache_key=cache_key)
+ metadata = pms_connect.get_metadata_details(rating_key=self.rating_key, cache_key=cache_key)
if metadata:
self.metadata = metadata
@@ -95,12 +90,12 @@ class ActivityHandler(object):
if session_list:
for session in session_list['sessions']:
- if int(session['session_key']) == self.get_session_key():
+ if int(session['session_key']) == self.session_key:
# Live sessions don't have rating keys in sessions
# Get it from the websocket data
if not session['rating_key']:
- session['rating_key'] = self.get_rating_key()
- session['rating_key_websocket'] = self.get_rating_key()
+ session['rating_key'] = self.rating_key
+ session['rating_key_websocket'] = self.rating_key
self.session = session
return session
@@ -116,9 +111,9 @@ class ActivityHandler(object):
self.get_db_session()
def set_session_state(self):
- self.ap.set_session_state(session_key=self.get_session_key(),
- state=self.timeline['state'],
- view_offset=self.timeline['viewOffset'],
+ self.ap.set_session_state(session_key=self.session_key,
+ state=self.state,
+ view_offset=self.view_offset,
stopped=helpers.timestamp())
def put_notification(self, notify_action, **kwargs):
@@ -148,19 +143,19 @@ class ActivityHandler(object):
self.update_db_session(notify=True)
# Schedule a callback to force stop a stale stream 5 minutes later
- schedule_callback('session_key-{}'.format(self.get_session_key()),
+ schedule_callback('session_key-{}'.format(self.session_key),
func=force_stop_stream,
- args=[self.get_session_key(), self.session['full_title'], self.session['username']],
+ args=[self.session_key, self.session['full_title'], self.session['username']],
minutes=5)
self.check_markers()
def on_stop(self, force_stop=False):
logger.debug("Tautulli ActivityHandler :: Session %s %sstopped."
- % (str(self.get_session_key()), 'force ' if force_stop else ''))
+ % (str(self.session_key), 'force ' if force_stop else ''))
# Set the session last_paused timestamp
- self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
+ self.ap.set_session_last_paused(session_key=self.session_key, timestamp=None)
# Update the session state and viewOffset
# Set force_stop to true to disable the state set
@@ -173,25 +168,25 @@ class ActivityHandler(object):
if row_id:
self.put_notification('on_stop')
- schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True)
+ schedule_callback('session_key-{}'.format(self.session_key), remove_job=True)
# Remove the session from our temp session table
logger.debug("Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue"
- % (str(self.get_session_key()), str(self.get_rating_key())))
+ % (str(self.session_key), str(self.rating_key)))
self.ap.delete_session(row_id=row_id)
- delete_metadata_cache(self.get_session_key())
+ delete_metadata_cache(self.session_key)
else:
- schedule_callback('session_key-{}'.format(self.get_session_key()),
+ schedule_callback('session_key-{}'.format(self.session_key),
func=force_stop_stream,
- args=[self.get_session_key(), self.db_session['full_title'], self.db_session['user']],
+ args=[self.session_key, self.db_session['full_title'], self.db_session['user']],
seconds=30)
def on_pause(self, still_paused=False):
if not still_paused:
- logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.session_key))
# Set the session last_paused timestamp
- self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=helpers.timestamp())
+ self.ap.set_session_last_paused(session_key=self.session_key, timestamp=helpers.timestamp())
self.update_db_session()
@@ -199,52 +194,52 @@ class ActivityHandler(object):
self.put_notification('on_pause')
def on_resume(self):
- logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.session_key))
# Set the session last_paused timestamp
- self.ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
+ self.ap.set_session_last_paused(session_key=self.session_key, timestamp=None)
self.update_db_session()
self.put_notification('on_resume')
def on_buffer(self):
- logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key())
+ logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.session_key)
# Increment our buffer count
- self.ap.increment_session_buffer_count(session_key=self.get_session_key())
+ self.ap.increment_session_buffer_count(session_key=self.session_key)
# Get our current buffer count
- current_buffer_count = self.ap.get_session_buffer_count(self.get_session_key())
+ current_buffer_count = self.ap.get_session_buffer_count(self.session_key)
logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." %
- (self.get_session_key(), current_buffer_count))
+ (self.session_key, current_buffer_count))
# Get our last triggered time
- buffer_last_triggered = self.ap.get_session_buffer_trigger_time(self.get_session_key())
+ buffer_last_triggered = self.ap.get_session_buffer_trigger_time(self.session_key)
self.update_db_session()
time_since_last_trigger = 0
if buffer_last_triggered:
logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." %
- (self.get_session_key(), buffer_last_triggered))
+ (self.session_key, buffer_last_triggered))
time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered)
if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
- self.ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
+ self.ap.set_session_buffer_trigger_time(session_key=self.session_key)
self.put_notification('on_buffer')
def on_error(self):
- logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s encountered an error." % str(self.session_key))
self.update_db_session()
self.put_notification('on_error')
def on_change(self):
- logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.session_key))
self.update_db_session()
@@ -252,17 +247,17 @@ class ActivityHandler(object):
def on_intro(self, marker):
if self.get_live_session():
- logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.session_key))
self.put_notification('on_intro', marker=marker)
def on_credits(self, marker):
if self.get_live_session():
- logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.session_key))
self.put_notification('on_credits', marker=marker)
def on_watched(self):
- logger.debug("Tautulli ActivityHandler :: Session %s watched." % str(self.get_session_key()))
+ logger.debug("Tautulli ActivityHandler :: Session %s watched." % str(self.session_key))
watched_notifiers = notification_handler.get_notify_state_enabled(
session=self.db_session, notify_action='on_watched', notified=False)
@@ -272,88 +267,85 @@ class ActivityHandler(object):
# This function receives events from our websocket connection
def process(self):
- if self.is_valid_session():
- self.get_db_session()
+ if not self.is_valid_session:
+ return
+
+ self.get_db_session()
- this_state = self.timeline['state']
- this_rating_key = str(self.timeline['ratingKey'])
- this_key = self.timeline['key']
- this_transcode_key = self.timeline.get('transcodeSession', '')
+ if not self.db_session:
+ # We don't have this session in our table yet, start a new one.
+ if self.state != 'buffering':
+ self.on_start()
+ return
- # Get the live tv session uuid
- this_live_uuid = this_key.split('/')[-1] if this_key.startswith('/livetv/sessions') else None
+ # If we already have this session in the temp table, check for state changes
+ # Re-schedule the callback to reset the 5 minutes timer
+ schedule_callback('session_key-{}'.format(self.session_key),
+ func=force_stop_stream,
+ args=[self.session_key, self.db_session['full_title'], self.db_session['user']],
+ minutes=5)
- # If we already have this session in the temp table, check for state changes
- if self.db_session:
- # Re-schedule the callback to reset the 5 minutes timer
- schedule_callback('session_key-{}'.format(self.get_session_key()),
- func=force_stop_stream,
- args=[self.get_session_key(), self.db_session['full_title'], self.db_session['user']],
- minutes=5)
+ last_state = self.db_session['state']
+ last_rating_key = str(self.db_session['rating_key'])
+ last_live_uuid = self.db_session['live_uuid']
+ last_transcode_key = self.db_session['transcode_key'].split('/')[-1]
+ last_paused = self.db_session['last_paused']
+ last_rating_key_websocket = self.db_session['rating_key_websocket']
+ last_guid = self.db_session['guid']
- last_state = self.db_session['state']
- last_rating_key = str(self.db_session['rating_key'])
- last_live_uuid = self.db_session['live_uuid']
- last_transcode_key = self.db_session['transcode_key'].split('/')[-1]
- last_paused = self.db_session['last_paused']
- last_rating_key_websocket = self.db_session['rating_key_websocket']
- last_guid = self.db_session['guid']
+ # Get the live tv session uuid
+ this_live_uuid = self.key.split('/')[-1] if self.key.startswith('/livetv/sessions') else None
- this_guid = last_guid
- # Check guid for live TV metadata every 60 seconds
- if self.db_session['live'] and helpers.timestamp() - self.db_session['stopped'] > 60:
- self.get_metadata(skip_cache=True)
- if self.metadata:
- this_guid = self.metadata['guid']
+ this_guid = last_guid
+ # Check guid for live TV metadata every 60 seconds
+ if self.db_session['live'] and helpers.timestamp() - self.db_session['stopped'] > 60:
+ self.get_metadata(skip_cache=True)
+ if self.metadata:
+ this_guid = self.metadata['guid']
- # Make sure the same item is being played
- if (this_rating_key == last_rating_key
- or this_rating_key == last_rating_key_websocket
- or this_live_uuid == last_live_uuid) \
- and this_guid == last_guid:
- # Update the session state and viewOffset
- if this_state == 'playing':
- # Update the session in our temp session table
- # if the last set temporary stopped time exceeds 60 seconds
- if helpers.timestamp() - self.db_session['stopped'] > 60:
- self.update_db_session()
+ # Make sure the same item is being played
+ if (self.rating_key == last_rating_key
+ or self.rating_key == last_rating_key_websocket
+ or this_live_uuid == last_live_uuid) \
+ and this_guid == last_guid:
+ # Update the session state and viewOffset
+ if self.state == 'playing':
+ # Update the session in our temp session table
+ # if the last set temporary stopped time exceeds 60 seconds
+ if helpers.timestamp() - self.db_session['stopped'] > 60:
+ self.update_db_session()
- # Start our state checks
- if this_state != last_state:
- if this_state == 'paused':
- self.on_pause()
- elif last_paused and this_state == 'playing':
- self.on_resume()
- elif this_state == 'stopped':
- self.on_stop()
- elif this_state == 'error':
- self.on_error()
+ # Start our state checks
+ if self.state != last_state:
+ if self.state == 'paused':
+ self.on_pause()
+ elif last_paused and self.state == 'playing':
+ self.on_resume()
+ elif self.state == 'stopped':
+ self.on_stop()
+ elif self.state == 'error':
+ self.on_error()
- elif this_state == 'paused':
- # Update the session last_paused timestamp
- self.on_pause(still_paused=True)
+ elif self.state == 'paused':
+ # Update the session last_paused timestamp
+ self.on_pause(still_paused=True)
- if this_state == 'buffering':
- self.on_buffer()
+ if self.state == 'buffering':
+ self.on_buffer()
- if this_transcode_key != last_transcode_key and this_state != 'stopped':
- self.on_change()
+ if self.transcode_key != last_transcode_key and self.state != 'stopped':
+ self.on_change()
- # If a client doesn't register stop events (I'm looking at you PHT!) check if the ratingKey has changed
- else:
- # Manually stop and start
- # Set force_stop so that we don't overwrite our last viewOffset
- self.on_stop(force_stop=True)
- self.on_start()
+ # If a client doesn't register stop events (I'm looking at you PHT!) check if the ratingKey has changed
+ else:
+ # Manually stop and start
+ # Set force_stop so that we don't overwrite our last viewOffset
+ self.on_stop(force_stop=True)
+ self.on_start()
- # Check for stream offset notifications
- self.check_markers()
- self.check_watched()
-
- else:
- # We don't have this session in our table yet, start a new one.
- if this_state != 'buffering':
- self.on_start()
+ # Check for stream offset notifications
+ self.check_markers()
+ self.check_watched()
def check_markers(self):
# Monitor if the stream has reached the intro or credit marker offsets
@@ -364,20 +356,20 @@ class ActivityHandler(object):
for marker_idx, marker in enumerate(self.metadata['markers'], start=1):
# Websocket events only fire every 10 seconds
# Check if the marker is within 10 seconds of the current viewOffset
- if marker['start_time_offset'] - 10000 <= self.timeline['viewOffset'] <= marker['end_time_offset']:
+ if marker['start_time_offset'] - 10000 <= self.view_offset <= marker['end_time_offset']:
marker_flag = True
if self.db_session['marker'] != marker_idx:
- self.ap.set_marker(session_key=self.get_session_key(), marker_idx=marker_idx, marker_type=marker['type'])
+ self.ap.set_marker(session_key=self.session_key, marker_idx=marker_idx, marker_type=marker['type'])
callback_func = getattr(self, 'on_{}'.format(marker['type']))
- if self.timeline['viewOffset'] < marker['start_time_offset']:
+ if self.view_offset < marker['start_time_offset']:
# Schedule a callback for the exact offset of the marker
schedule_callback(
- 'session_key-{}-marker-{}'.format(self.get_session_key(), marker_idx),
+ 'session_key-{}-marker-{}'.format(self.session_key, marker_idx),
func=callback_func,
args=[marker],
- milliseconds=marker['start_time_offset'] - self.timeline['viewOffset']
+ milliseconds=marker['start_time_offset'] - self.view_offset
)
else:
callback_func(marker)
@@ -385,7 +377,7 @@ class ActivityHandler(object):
break
if not marker_flag:
- self.ap.set_marker(session_key=self.get_session_key(), marker_idx=0)
+ self.ap.set_marker(session_key=self.session_key, marker_idx=0)
def check_watched(self):
# Monitor if the stream has reached the watch percentage for notifications
@@ -399,7 +391,7 @@ class ActivityHandler(object):
}
if progress_percent >= watched_percent.get(self.db_session['media_type'], 101):
- self.ap.set_watched(session_key=self.get_session_key())
+ self.ap.set_watched(session_key=self.session_key)
self.on_watched()
@@ -408,121 +400,106 @@ class TimelineHandler(object):
def __init__(self, timeline):
self.timeline = timeline
- def is_item(self):
- if 'itemID' in self.timeline:
- return True
+ self.rating_key = None
- return False
+ self.is_item = ('itemID' in self.timeline)
+ if self.is_item:
+ self.rating_key = int(self.timeline['itemID'])
- def get_rating_key(self):
- if self.is_item():
- return int(self.timeline['itemID'])
-
- return None
-
- def get_metadata(self):
- pms_connect = pmsconnect.PmsConnect()
- metadata = pms_connect.get_metadata_details(self.get_rating_key())
-
- if metadata:
- return metadata
-
- return None
+ self.parent_rating_key = helpers.cast_to_int(self.timeline.get('parentItemID')) or None
+ self.grandparent_rating_key = helpers.cast_to_int(self.timeline.get('rootItemID')) or None
+ self.identifier = self.timeline.get('identifier')
+ self.state_type = self.timeline.get('state')
+ self.media_type = common.MEDIA_TYPE_VALUES.get(self.timeline.get('type'))
+ self.section_id = helpers.cast_to_int(self.timeline.get('sectionID', 0))
+ self.title = self.timeline.get('title', 'Unknown')
+ self.metadata_state = self.timeline.get('metadataState')
+ self.media_state = self.timeline.get('mediaState')
+ self.queue_size = self.timeline.get('queueSize')
# This function receives events from our websocket connection
def process(self):
- if self.is_item():
- global RECENTLY_ADDED_QUEUE
+ if not self.is_item:
+ return
+
+ # Return if it is not a library event (i.e. DVR EPG event)
+ if self.identifier != 'com.plexapp.plugins.library':
+ return
- rating_key = self.get_rating_key()
- parent_rating_key = helpers.cast_to_int(self.timeline.get('parentItemID')) or None
- grandparent_rating_key = helpers.cast_to_int(self.timeline.get('rootItemID')) or None
+ global RECENTLY_ADDED_QUEUE
- identifier = self.timeline.get('identifier')
- state_type = self.timeline.get('state')
- media_type = common.MEDIA_TYPE_VALUES.get(self.timeline.get('type'))
- section_id = helpers.cast_to_int(self.timeline.get('sectionID', 0))
- title = self.timeline.get('title', 'Unknown')
- metadata_state = self.timeline.get('metadataState')
- media_state = self.timeline.get('mediaState')
- queue_size = self.timeline.get('queueSize')
+ # Add a new media item to the recently added queue
+ if self.media_type and self.section_id > 0 and self.state_type == 0 and self.metadata_state == 'created':
- # Return if it is not a library event (i.e. DVR EPG event)
- if identifier != 'com.plexapp.plugins.library':
- return
+ if self.media_type in ('episode', 'track'):
+ grandparent_set = RECENTLY_ADDED_QUEUE.get(self.grandparent_rating_key, set())
+ grandparent_set.add(self.parent_rating_key)
+ RECENTLY_ADDED_QUEUE[self.grandparent_rating_key] = grandparent_set
- # Add a new media item to the recently added queue
- if media_type and section_id > 0 and state_type == 0 and metadata_state == 'created':
+ parent_set = RECENTLY_ADDED_QUEUE.get(self.parent_rating_key, set())
+ parent_set.add(self.rating_key)
+ RECENTLY_ADDED_QUEUE[self.parent_rating_key] = parent_set
- if media_type in ('episode', 'track'):
- grandparent_set = RECENTLY_ADDED_QUEUE.get(grandparent_rating_key, set())
- grandparent_set.add(parent_rating_key)
- RECENTLY_ADDED_QUEUE[grandparent_rating_key] = grandparent_set
+ RECENTLY_ADDED_QUEUE[self.rating_key] = {self.grandparent_rating_key}
- parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set())
- parent_set.add(rating_key)
- RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set
+ logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) "
+ "added to recently added queue."
+ % (self.title, str(self.rating_key), str(self.grandparent_rating_key)))
- RECENTLY_ADDED_QUEUE[rating_key] = {grandparent_rating_key}
+ # Schedule a callback to clear the recently added queue
+ schedule_callback('rating_key-{}'.format(self.grandparent_rating_key),
+ func=clear_recently_added_queue,
+ args=[self.grandparent_rating_key, self.title],
+ seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
- logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) "
- "added to recently added queue."
- % (title, str(rating_key), str(grandparent_rating_key)))
+ elif self.media_type in ('season', 'album'):
+ parent_set = RECENTLY_ADDED_QUEUE.get(self.parent_rating_key, set())
+ parent_set.add(self.rating_key)
+ RECENTLY_ADDED_QUEUE[self.parent_rating_key] = parent_set
- # Schedule a callback to clear the recently added queue
- schedule_callback('rating_key-{}'.format(grandparent_rating_key),
- func=clear_recently_added_queue,
- args=[grandparent_rating_key, title],
- seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
+ logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) "
+ "added to recently added queue."
+ % (self.title, str(self.rating_key), str(self.parent_rating_key)))
- elif media_type in ('season', 'album'):
- parent_set = RECENTLY_ADDED_QUEUE.get(parent_rating_key, set())
- parent_set.add(rating_key)
- RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set
+ # Schedule a callback to clear the recently added queue
+ schedule_callback('rating_key-{}'.format(self.parent_rating_key),
+ func=clear_recently_added_queue,
+ args=[self.parent_rating_key, self.title],
+ seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
- logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) "
- "added to recently added queue."
- % (title, str(rating_key), str(parent_rating_key)))
-
- # Schedule a callback to clear the recently added queue
- schedule_callback('rating_key-{}'.format(parent_rating_key),
- func=clear_recently_added_queue,
- args=[parent_rating_key, title],
- seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
-
- elif media_type in ('movie', 'show', 'artist'):
- queue_set = RECENTLY_ADDED_QUEUE.get(rating_key, set())
- RECENTLY_ADDED_QUEUE[rating_key] = queue_set
-
- logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) "
- "added to recently added queue."
- % (title, str(rating_key)))
-
- # Schedule a callback to clear the recently added queue
- schedule_callback('rating_key-{}'.format(rating_key),
- func=clear_recently_added_queue,
- args=[rating_key, title],
- seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
-
- # A movie, show, or artist is done processing
- elif media_type in ('movie', 'show', 'artist') and section_id > 0 and \
- state_type == 5 and metadata_state is None and queue_size is None and \
- rating_key in RECENTLY_ADDED_QUEUE:
+ elif self.media_type in ('movie', 'show', 'artist'):
+ queue_set = RECENTLY_ADDED_QUEUE.get(self.rating_key, set())
+ RECENTLY_ADDED_QUEUE[self.rating_key] = queue_set
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) "
- "done processing metadata."
- % (title, str(rating_key)))
+ "added to recently added queue."
+ % (self.title, str(self.rating_key)))
- # An item was deleted, make sure it is removed from the queue
- elif state_type == 9 and metadata_state == 'deleted':
- if rating_key in RECENTLY_ADDED_QUEUE and not RECENTLY_ADDED_QUEUE[rating_key]:
- logger.debug("Tautulli TimelineHandler :: Library item %s "
- "removed from recently added queue."
- % str(rating_key))
- del_keys(rating_key)
+ # Schedule a callback to clear the recently added queue
+ schedule_callback('rating_key-{}'.format(self.rating_key),
+ func=clear_recently_added_queue,
+ args=[self.rating_key, self.title],
+ seconds=plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY)
- # Remove the callback if the item is removed
- schedule_callback('rating_key-{}'.format(rating_key), remove_job=True)
+ # A movie, show, or artist is done processing
+ elif self.media_type in ('movie', 'show', 'artist') and self.section_id > 0 and \
+ self.state_type == 5 and self.metadata_state is None and self.queue_size is None and \
+ self.rating_key in RECENTLY_ADDED_QUEUE:
+
+ logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) "
+ "done processing metadata."
+ % (self.title, str(self.rating_key)))
+
+ # An item was deleted, make sure it is removed from the queue
+ elif self.state_type == 9 and self.metadata_state == 'deleted':
+ if self.rating_key in RECENTLY_ADDED_QUEUE and not RECENTLY_ADDED_QUEUE[self.rating_key]:
+ logger.debug("Tautulli TimelineHandler :: Library item %s "
+ "removed from recently added queue."
+ % str(self.rating_key))
+ del_keys(self.rating_key)
+
+ # Remove the callback if the item is removed
+ schedule_callback('rating_key-{}'.format(self.rating_key), remove_job=True)
class ReachabilityHandler(object):
@@ -530,10 +507,7 @@ class ReachabilityHandler(object):
def __init__(self, data):
self.data = data
- def is_reachable(self):
- if 'reachability' in self.data:
- return self.data['reachability']
- return False
+ self.is_reachable = self.data.get('reachability', False)
def remote_access_enabled(self):
pms_connect = pmsconnect.PmsConnect()
@@ -552,42 +526,44 @@ class ReachabilityHandler(object):
return
# Do nothing if remote access is still up and hasn't changed
- if self.is_reachable() and plexpy.PLEX_REMOTE_ACCESS_UP:
+ if self.is_reachable and plexpy.PLEX_REMOTE_ACCESS_UP:
return
pms_connect = pmsconnect.PmsConnect()
server_response = pms_connect.get_server_response()
- if server_response:
- # Waiting for port mapping
- if server_response['mapping_state'] == 'waiting':
- logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.")
+ if not server_response:
+ return
- elif plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
- logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s" % server_response['reason'])
- logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.")
+ # Waiting for port mapping
+ if server_response['mapping_state'] == 'waiting':
+ logger.warn("Tautulli ReachabilityHandler :: Remote access waiting for port mapping.")
- plexpy.PLEX_REMOTE_ACCESS_UP = False
+ elif plexpy.PLEX_REMOTE_ACCESS_UP is not False and server_response['reason']:
+ logger.warn("Tautulli ReachabilityHandler :: Remote access failed: %s" % server_response['reason'])
+ logger.info("Tautulli ReachabilityHandler :: Plex remote access is down.")
- if not ACTIVITY_SCHED.get_job('on_extdown'):
- logger.debug("Tautulli ReachabilityHandler :: Scheduling remote access down callback in %d seconds.",
- plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
- schedule_callback('on_extdown', func=self.on_extdown, args=[server_response],
- seconds=plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
+ plexpy.PLEX_REMOTE_ACCESS_UP = False
- elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
- logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.")
+ if not ACTIVITY_SCHED.get_job('on_extdown'):
+ logger.debug("Tautulli ReachabilityHandler :: Scheduling remote access down callback in %d seconds.",
+ plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
+ schedule_callback('on_extdown', func=self.on_extdown, args=[server_response],
+ seconds=plexpy.CONFIG.NOTIFY_REMOTE_ACCESS_THRESHOLD)
- plexpy.PLEX_REMOTE_ACCESS_UP = True
+ elif plexpy.PLEX_REMOTE_ACCESS_UP is False and not server_response['reason']:
+ logger.info("Tautulli ReachabilityHandler :: Plex remote access is back up.")
- if ACTIVITY_SCHED.get_job('on_extdown'):
- logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.")
- schedule_callback('on_extdown', remove_job=True)
- else:
- self.on_extup(server_response)
+ plexpy.PLEX_REMOTE_ACCESS_UP = True
- elif plexpy.PLEX_REMOTE_ACCESS_UP is None:
- plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable()
+ if ACTIVITY_SCHED.get_job('on_extdown'):
+ logger.debug("Tautulli ReachabilityHandler :: Cancelling scheduled remote access down callback.")
+ schedule_callback('on_extdown', remove_job=True)
+ else:
+ self.on_extup(server_response)
+
+ elif plexpy.PLEX_REMOTE_ACCESS_UP is None:
+ plexpy.PLEX_REMOTE_ACCESS_UP = self.is_reachable
def del_keys(key):
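For reference, a minimal runnable sketch of the two changes in this hunk: is_reachable becomes a plain attribute read from the handler's data payload, and the connection check returns early when the server gives no response. The class below is a simplified stand-in, not the full handler; the check() method, its remote_access_up parameter (standing in for plexpy.PLEX_REMOTE_ACCESS_UP), and the return strings are illustrative assumptions.

    class ReachabilityHandlerSketch(object):
        def __init__(self, data):
            self.data = data
            # Attribute instead of the old is_reachable() method
            self.is_reachable = self.data.get('reachable', False)

        def check(self, remote_access_up, server_response):
            # Do nothing if remote access is still up and hasn't changed
            if self.is_reachable and remote_access_up:
                return 'still up'
            # Guard clause replaces the old nested "if server_response:" block
            if not server_response:
                return 'no response'
            if server_response['mapping_state'] == 'waiting':
                return 'waiting for port mapping'
            return 'state change to handle'

    handler = ReachabilityHandlerSketch({'reachable': True})
    print(handler.check(remote_access_up=True, server_response=None))   # still up
    print(handler.check(remote_access_up=False, server_response=None))  # no response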
From 7cc78d448d6d3f907d203203be0d20d2a359f275 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Thu, 16 Feb 2023 17:06:46 -0800
Subject: [PATCH 018/113] Simplify set marker in database
---
plexpy/activity_processor.py | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py
index c821d23d..dcd1f138 100644
--- a/plexpy/activity_processor.py
+++ b/plexpy/activity_processor.py
@@ -661,15 +661,10 @@ class ActivityProcessor(object):
[session['write_attempts'] + 1, session_key])
def set_marker(self, session_key=None, marker_idx=None, marker_type=None):
- if marker_type == 'intro':
- args = [1, 0]
- elif marker_type == 'credits':
- args = [0, 1]
- else:
- args = [0, 0]
+ marker_args = [int(marker_type == 'intro'), int(marker_type == 'credits')]
self.db.action('UPDATE sessions SET intro = ?, credits = ?, marker = ? '
'WHERE session_key = ?',
- args + [marker_idx, session_key])
+ marker_args + [marker_idx, session_key])
def set_watched(self, session_key=None):
self.db.action('UPDATE sessions SET watched = ? '
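The replacement above folds the old three-way if/elif into a pair of boolean-to-int flags. A quick sketch of what marker_args evaluates to for each marker type (the 'other'/None cases are illustrative inputs, matching the old else branch):

    for marker_type in ('intro', 'credits', 'other', None):
        marker_args = [int(marker_type == 'intro'), int(marker_type == 'credits')]
        print(marker_type, marker_args)
    # intro        -> [1, 0]
    # credits      -> [0, 1]
    # other / None -> [0, 0]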
From 9c6b8f1af5bd4f1e0c32a85d178253ae14014fd9 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 17 Feb 2023 09:28:44 -0800
Subject: [PATCH 019/113] Simplify metadata credits marker first flag
---
plexpy/pmsconnect.py | 12 +++++-------
1 file changed, 5 insertions(+), 7 deletions(-)
diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py
index cc433027..347de513 100644
--- a/plexpy/pmsconnect.py
+++ b/plexpy/pmsconnect.py
@@ -780,17 +780,15 @@ class PmsConnect(object):
for marker in metadata_main.getElementsByTagName('Marker'):
marker_type = helpers.get_xml_attr(marker, 'type')
if marker_type == 'credits':
- if first is None:
- first = True
- elif first is True:
- first = False
+ first = bool(first is None)
+ final = helpers.bool_true(helpers.get_xml_attr(marker, 'final'))
markers.append({
'id': helpers.cast_to_int(helpers.get_xml_attr(marker, 'id')),
- 'type': helpers.get_xml_attr(marker, 'type'),
+ 'type': marker_type,
'start_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'startTimeOffset')),
'end_time_offset': helpers.cast_to_int(helpers.get_xml_attr(marker, 'endTimeOffset')),
- 'first': first if marker_type == 'credits' else False,
- 'final': helpers.bool_true(helpers.get_xml_attr(marker, 'final'))
+ 'first': first if marker_type == 'credits' else None,
+ 'final': final if marker_type == 'credits' else None
})
if metadata_type == 'movie':
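The one-liner above reproduces the old None -> True -> False progression: the first credits marker encountered gets first=True, and every later one gets False. A small sketch of the state transitions (the sample marker sequence is illustrative):

    first = None
    for marker_type in ('intro', 'credits', 'credits', 'credits'):
        if marker_type == 'credits':
            first = bool(first is None)
            print(marker_type, first)
    # credits True
    # credits False
    # credits False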
From 32bb98e8c1a0d3b790e269e28c15d259628e00d1 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 17 Feb 2023 09:29:08 -0800
Subject: [PATCH 020/113] Update get_metadata_details docs
---
plexpy/webserve.py | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/plexpy/webserve.py b/plexpy/webserve.py
index 8ad3e664..963f2ce1 100644
--- a/plexpy/webserve.py
+++ b/plexpy/webserve.py
@@ -5339,6 +5339,24 @@ class WebInterface(object):
"last_viewed_at": "1462165717",
"library_name": "TV Shows",
"live": 0,
+ "markers": [
+ {
+ "id": 908,
+ "type": "credits",
+ "start_time_offset": 2923863,
+ "end_time_offset": 2998197,
+ "first": true,
+ "final": true
+ },
+ {
+ "id": 908,
+ "type": "intro",
+ "start_time_offset": 1622,
+ "end_time_offset": 109135,
+ "first": null,
+ "final": null
+ }
+ ],
"media_index": "1",
"media_info": [
{
From 6b0b3a476fbfde4cdf3dbfa617ff8080cdf28234 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 17 Feb 2023 10:01:57 -0800
Subject: [PATCH 021/113] Add support for commercial marker triggers
---
plexpy/__init__.py | 13 +++++++++++--
plexpy/activity_handler.py | 11 +++++++++--
plexpy/activity_processor.py | 8 ++++++--
plexpy/notification_handler.py | 4 ++--
plexpy/notifiers.py | 8 ++++++++
5 files changed, 36 insertions(+), 8 deletions(-)
diff --git a/plexpy/__init__.py b/plexpy/__init__.py
index 09c16586..d3de0815 100644
--- a/plexpy/__init__.py
+++ b/plexpy/__init__.py
@@ -656,8 +656,8 @@ def dbcheck():
'synced_version INTEGER, synced_version_profile TEXT, '
'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, '
'secure INTEGER, relayed INTEGER, '
- 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, '
- 'watched INTEGER DEFAULT 0, intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, '
+ 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, '
+ 'intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, commercial INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, '
'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, '
'rating_key_websocket TEXT)'
)
@@ -1417,6 +1417,15 @@ def dbcheck():
'ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0'
)
+ # Upgrade sessions table from earlier versions
+ try:
+ c_db.execute('SELECT commercial FROM sessions')
+ except sqlite3.OperationalError:
+ logger.debug(u"Altering database. Updating database table sessions.")
+ c_db.execute(
+ 'ALTER TABLE sessions ADD COLUMN commercial INTEGER DEFAULT 0'
+ )
+
# Upgrade sessions table from earlier versions
try:
c_db.execute('SELECT marker FROM sessions')
diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py
index 5ed69e10..8933c1ac 100644
--- a/plexpy/activity_handler.py
+++ b/plexpy/activity_handler.py
@@ -247,13 +247,20 @@ class ActivityHandler(object):
def on_intro(self, marker):
if self.get_live_session():
- logger.debug("Tautulli ActivityHandler :: Session %s intro marker reached." % str(self.session_key))
+ logger.debug("Tautulli ActivityHandler :: Session %s reached intro marker." % str(self.session_key))
self.put_notification('on_intro', marker=marker)
+ def on_commercial(self, marker):
+ if self.get_live_session():
+ logger.debug("Tautulli ActivityHandler :: Session %s reached commercial marker." % str(self.session_key))
+
+ self.put_notification('on_commercial', marker=marker)
+
def on_credits(self, marker):
if self.get_live_session():
- logger.debug("Tautulli ActivityHandler :: Session %s credits marker reached." % str(self.session_key))
+ logger.debug("Tautulli ActivityHandler :: Session %s reached credits marker." % str(self.session_key))
+
self.put_notification('on_credits', marker=marker)
def on_watched(self):
diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py
index dcd1f138..a8d8cdd4 100644
--- a/plexpy/activity_processor.py
+++ b/plexpy/activity_processor.py
@@ -661,8 +661,12 @@ class ActivityProcessor(object):
[session['write_attempts'] + 1, session_key])
def set_marker(self, session_key=None, marker_idx=None, marker_type=None):
- marker_args = [int(marker_type == 'intro'), int(marker_type == 'credits')]
- self.db.action('UPDATE sessions SET intro = ?, credits = ?, marker = ? '
+ marker_args = [
+ int(marker_type == 'intro'),
+ int(marker_type == 'commercial'),
+ int(marker_type == 'credits')
+ ]
+ self.db.action('UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? '
'WHERE session_key = ?',
marker_args + [marker_idx, session_key])
diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py
index c92415e5..4b727c5d 100644
--- a/plexpy/notification_handler.py
+++ b/plexpy/notification_handler.py
@@ -1023,8 +1023,8 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
'live': notify_params['live'],
'marker_start': marker['start_time_offset'],
'marker_end': marker['end_time_offset'],
- 'credits_marker_first': int(marker['first']),
- 'credits_marker_final': int(marker['final']),
+ 'credits_marker_first': helpers.cast_to_int(marker['first']),
+ 'credits_marker_final': helpers.cast_to_int(marker['final']),
'channel_call_sign': notify_params['channel_call_sign'],
'channel_identifier': notify_params['channel_identifier'],
'channel_thumb': notify_params['channel_thumb'],
diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py
index 383f4908..d717c8f6 100644
--- a/plexpy/notifiers.py
+++ b/plexpy/notifiers.py
@@ -348,6 +348,14 @@ def available_notification_actions(agent_id=None):
'icon': 'fa-bookmark',
'media_types': ('episode',)
},
+ {'label': 'Commercial Marker',
+ 'name': 'on_credits',
+ 'description': 'Trigger a notification when a video stream reaches any commercial marker.',
+ 'subject': 'Tautulli ({server_name})',
+ 'body': '{user} ({player}) has reached a commercial marker for {title}.',
+ 'icon': 'fa-bookmark',
+ 'media_types': ('movie', 'episode')
+ },
{'label': 'Credits Marker',
'name': 'on_credits',
'description': 'Trigger a notification when a video stream reaches any credits marker.',
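With this patch, set_marker writes three flags instead of two. Extending the earlier two-flag sketch (inputs are illustrative), a 'commercial' marker now produces [0, 1, 0]:

    for marker_type in ('intro', 'commercial', 'credits'):
        marker_args = [
            int(marker_type == 'intro'),
            int(marker_type == 'commercial'),
            int(marker_type == 'credits'),
        ]
        print(marker_type, marker_args)
    # intro      -> [1, 0, 0]
    # commercial -> [0, 1, 0]
    # credits    -> [0, 0, 1]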
From 87d3c0ae8172ca65f3168d999bdbdb920265be14 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 17 Feb 2023 18:54:52 -0800
Subject: [PATCH 022/113] Fix missing on_commercial columns in database
---
plexpy/__init__.py | 22 +++++++++++++++++++---
plexpy/notifiers.py | 2 +-
2 files changed, 20 insertions(+), 4 deletions(-)
diff --git a/plexpy/__init__.py b/plexpy/__init__.py
index d3de0815..2827c5fa 100644
--- a/plexpy/__init__.py
+++ b/plexpy/__init__.py
@@ -754,7 +754,8 @@ def dbcheck():
'agent_id INTEGER, agent_name TEXT, agent_label TEXT, friendly_name TEXT, notifier_config TEXT, '
'on_play INTEGER DEFAULT 0, on_stop INTEGER DEFAULT 0, on_pause INTEGER DEFAULT 0, '
'on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, '
- 'on_error INTEGER DEFAULT 0, on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, '
+ 'on_error INTEGER DEFAULT 0, '
+ 'on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, on_commercial INTEGER DEFAULT 0, '
'on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, '
'on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, '
'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, '
@@ -762,14 +763,14 @@ def dbcheck():
'on_plexpydbcorrupt INTEGER DEFAULT 0, '
'on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, '
'on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_error_subject TEXT, '
- 'on_intro_subject TEXT, on_credits_subject TEXT, '
+ 'on_intro_subject TEXT, on_credits_subject TEXT, on_commercial_subject TEXT,'
'on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, '
'on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, '
'on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, '
'on_plexpydbcorrupt_subject TEXT, '
'on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, '
'on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, '
- 'on_intro_body TEXT, on_credits_body TEXT, '
+ 'on_intro_body TEXT, on_credits_body TEXT, on_commercial_body TEXT, '
'on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, '
'on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, '
'on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, '
@@ -2429,6 +2430,21 @@ def dbcheck():
'ALTER TABLE notifiers ADD COLUMN on_credits_body TEXT'
)
+ # Upgrade notifiers table from earlier versions
+ try:
+ c_db.execute('SELECT on_commercial FROM notifiers')
+ except sqlite3.OperationalError:
+ logger.debug("Altering database. Updating database table notifiers.")
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_commercial INTEGER DEFAULT 0'
+ )
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_commercial_subject TEXT'
+ )
+ c_db.execute(
+ 'ALTER TABLE notifiers ADD COLUMN on_commercial_body TEXT'
+ )
+
# Upgrade tvmaze_lookup table from earlier versions
try:
c_db.execute('SELECT rating_key FROM tvmaze_lookup')
diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py
index d717c8f6..f0f02bf0 100644
--- a/plexpy/notifiers.py
+++ b/plexpy/notifiers.py
@@ -349,7 +349,7 @@ def available_notification_actions(agent_id=None):
'media_types': ('episode',)
},
{'label': 'Commercial Marker',
- 'name': 'on_credits',
+ 'name': 'on_commercial',
'description': 'Trigger a notification when a video stream reaches any commercial marker.',
'subject': 'Tautulli ({server_name})',
'body': '{user} ({player}) has reached a commercial marker for {title}.',
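The upgrade block added above follows the usual dbcheck() guard pattern: probe for the column with a SELECT and only ALTER the table when that fails. A self-contained sketch against an in-memory database; the reduced CREATE TABLE layout is only for illustration.

    import sqlite3

    conn = sqlite3.connect(':memory:')
    c_db = conn.cursor()
    c_db.execute('CREATE TABLE notifiers (id INTEGER PRIMARY KEY, on_credits INTEGER DEFAULT 0)')

    try:
        c_db.execute('SELECT on_commercial FROM notifiers')
    except sqlite3.OperationalError:
        # Column missing: add the three on_commercial columns, as in the patch
        c_db.execute('ALTER TABLE notifiers ADD COLUMN on_commercial INTEGER DEFAULT 0')
        c_db.execute('ALTER TABLE notifiers ADD COLUMN on_commercial_subject TEXT')
        c_db.execute('ALTER TABLE notifiers ADD COLUMN on_commercial_body TEXT')

    conn.commit()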
From 6807cebe51311f599a32f5df421c2dccc6e0c498 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Fri, 17 Feb 2023 18:55:28 -0800
Subject: [PATCH 023/113] Strip whitespace from condition values
---
plexpy/notification_handler.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py
index 4b727c5d..197314fc 100644
--- a/plexpy/notification_handler.py
+++ b/plexpy/notification_handler.py
@@ -294,7 +294,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
# Cast the condition values to the correct type
try:
if parameter_type == 'str':
- values = ['' if v == '~' else str(v).lower() for v in values]
+ values = ['' if v == '~' else str(v).strip().lower() for v in values]
elif parameter_type == 'int':
values = [helpers.cast_to_int(v) for v in values]
@@ -313,7 +313,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
# Cast the parameter value to the correct type
try:
if parameter_type == 'str':
- parameter_value = str(parameter_value).lower()
+ parameter_value = str(parameter_value).strip().lower()
elif parameter_type == 'int':
parameter_value = helpers.cast_to_int(parameter_value)
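Why the added .strip() matters: a condition value saved with stray whitespace would otherwise never equal the parameter value. A minimal before/after sketch (the sample strings are illustrative):

    values = ['  The Office ']
    parameter_value = 'The Office'

    # Without .strip(): no match
    print(str(parameter_value).lower() in [str(v).lower() for v in values])                       # False
    # With .strip(): match
    print(str(parameter_value).strip().lower() in [str(v).strip().lower() for v in values])       # True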
From c2abfce8e1bcc3a1f57a76a98e642eaa263edf6f Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Sun, 19 Feb 2023 17:41:48 -0800
Subject: [PATCH 024/113] Save credits marker offsets to session history
---
plexpy/__init__.py | 15 ++++++++++++++-
plexpy/activity_processor.py | 12 +++++++++++-
2 files changed, 25 insertions(+), 2 deletions(-)
diff --git a/plexpy/__init__.py b/plexpy/__init__.py
index 2827c5fa..d0aed7cf 100644
--- a/plexpy/__init__.py
+++ b/plexpy/__init__.py
@@ -715,7 +715,8 @@ def dbcheck():
'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, '
'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, '
'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, '
- 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)'
+ 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, '
+ 'marker_credits_first INTEGER DEFAULT NULL, marker_credits_final INTEGER DEFAULT NULL)'
)
# users table :: This table keeps record of the friends list
@@ -1564,6 +1565,18 @@ def dbcheck():
'ALTER TABLE session_history_metadata ADD COLUMN channel_thumb TEXT'
)
+ # Upgrade session_history_metadata table from earlier versions
+ try:
+ c_db.execute('SELECT marker_credits_first FROM session_history_metadata')
+ except sqlite3.OperationalError:
+ logger.debug("Altering database. Updating database table session_history_metadata.")
+ c_db.execute(
+ 'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_first INTEGER DEFAULT NULL'
+ )
+ c_db.execute(
+ 'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_final INTEGER DEFAULT NULL'
+ )
+
# Upgrade session_history_media_info table from earlier versions
try:
c_db.execute('SELECT transcode_decision FROM session_history_media_info')
diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py
index a8d8cdd4..d55c6738 100644
--- a/plexpy/activity_processor.py
+++ b/plexpy/activity_processor.py
@@ -490,6 +490,14 @@ class ActivityProcessor(object):
genres = ";".join(metadata['genres'])
labels = ";".join(metadata['labels'])
+ marker_credits_first = None
+ marker_credits_final = None
+ for marker in metadata['markers']:
+ if marker['first']:
+ marker_credits_first = marker['start_time_offset']
+ if marker['final']:
+ marker_credits_final = marker['start_time_offset']
+
# logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..."
# % session['session_key'])
keys = {'id': last_id}
@@ -528,7 +536,9 @@ class ActivityProcessor(object):
'live': session['live'],
'channel_call_sign': media_info.get('channel_call_sign', ''),
'channel_identifier': media_info.get('channel_identifier', ''),
- 'channel_thumb': media_info.get('channel_thumb', '')
+ 'channel_thumb': media_info.get('channel_thumb', ''),
+ 'marker_credits_first': marker_credits_first,
+ 'marker_credits_final': marker_credits_final
}
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
From b1dd28e39b648482cb55a0c6b8a827760b4aea90 Mon Sep 17 00:00:00 2001
From: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
Date: Mon, 20 Feb 2023 16:33:19 -0800
Subject: [PATCH 025/113] Add setting to change video watched completion
behaviour
---
data/interfaces/default/settings.html | 14 ++++++++++++++
plexpy/config.py | 4 +++-
2 files changed, 17 insertions(+), 1 deletion(-)
diff --git a/data/interfaces/default/settings.html b/data/interfaces/default/settings.html
index f7c24211..fd234da2 100644
--- a/data/interfaces/default/settings.html
+++ b/data/interfaces/default/settings.html
@@ -213,6 +213,20 @@
Set the percentage for a music track to be considered as listened. Minimum 50, Maximum 95.
+