Merge branch 'nightly' into dependabot/pip/nightly/importlib-resources-5.12.0

Commit 029f3bcf79 by JonnyWong16, 2023-03-02 20:50:07 -08:00, committed by GitHub
32 changed files with 1436 additions and 716 deletions


@ -70,7 +70,7 @@ jobs:
password: ${{ secrets.GHCR_TOKEN }}
- name: Docker Build and Push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
if: success()
with:
context: .


@ -79,7 +79,6 @@ select.form-control {
color: #eee !important;
border: 0px solid #444 !important;
background: #555 !important;
padding: 1px 2px;
transition: background-color .3s;
}
.selectize-control.form-control .selectize-input {
@ -87,7 +86,6 @@ select.form-control {
align-items: center;
flex-wrap: wrap;
margin-bottom: 4px;
padding-left: 5px;
}
.selectize-control.form-control.selectize-pms-ip .selectize-input {
padding-left: 12px !important;


@ -12,6 +12,7 @@ data :: Usable parameters (if not applicable for media type, blank value will be
== Global keys ==
rating_key Returns the unique identifier for the media item.
media_type Returns the type of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'.
sub_media_type Returns the subtype of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'.
art Returns the location of the item's artwork
title Returns the name of the movie, show, episode, artist, album, or track.
edition_title Returns the edition title of a movie.
@ -213,7 +214,7 @@ DOCUMENTATION :: END
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
% elif data['media_type'] in ('artist', 'album', 'track', 'playlist', 'photo_album', 'photo', 'clip'):
% elif data['media_type'] in ('artist', 'album', 'track', 'playlist', 'photo_album', 'photo', 'clip') or data['sub_media_type'] in ('artist', 'album', 'track'):
<div class="summary-poster-face-track" style="background-image: url(${page('pms_image_proxy', data['thumb'], data['rating_key'], 300, 300, fallback='cover')});">
<div class="summary-poster-face-overlay">
<span></span>
@ -267,7 +268,7 @@ DOCUMENTATION :: END
<h1><a href="${page('info', data['parent_rating_key'])}">${data['parent_title']}</a></h1>
<h2>${data['title']}</h2>
% elif data['media_type'] == 'track':
<h1><a href="${page('info', data['grandparent_rating_key'])}">${data['original_title'] or data['grandparent_title']}</a></h1>
<h1><a href="${page('info', data['grandparent_rating_key'])}">${data['grandparent_title']}</a></h1>
<h2><a href="${page('info', data['parent_rating_key'])}">${data['parent_title']}</a> - ${data['title']}</h2>
<h3 class="hidden-xs">T${data['media_index']}</h3>
% elif data['media_type'] in ('photo', 'clip'):
@ -283,14 +284,14 @@ DOCUMENTATION :: END
padding_height = ''
if data['media_type'] == 'movie' or data['live']:
padding_height = 'height: 305px;'
elif data['media_type'] in ('show', 'season', 'collection'):
padding_height = 'height: 270px;'
elif data['media_type'] == 'episode':
padding_height = 'height: 70px;'
elif data['media_type'] in ('artist', 'album', 'playlist', 'photo_album', 'photo'):
elif data['media_type'] in ('artist', 'album', 'playlist', 'photo_album', 'photo') or data['sub_media_type'] in ('artist', 'album', 'track'):
padding_height = 'height: 150px;'
elif data['media_type'] in ('track', 'clip'):
padding_height = 'height: 180px;'
elif data['media_type'] == 'episode':
padding_height = 'height: 70px;'
elif data['media_type'] in ('show', 'season', 'collection'):
padding_height = 'height: 270px;'
%>
<div class="summary-content-padding hidden-xs hidden-sm" style="${padding_height}">
% if data['media_type'] in ('movie', 'episode', 'track', 'clip'):
@ -369,6 +370,11 @@ DOCUMENTATION :: END
Studio <strong> ${data['studio']}</strong>
% endif
</div>
<div class="summary-content-details-tag">
% if data['media_type'] == 'track' and data['original_title']:
Track Artists <strong> ${data['original_title']}</strong>
% endif
</div>
<div class="summary-content-details-tag">
% if data['media_type'] == 'movie':
Year <strong> ${data['year']}</strong>
@ -548,7 +554,7 @@ DOCUMENTATION :: END
</div>
</div>
% endif
% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track'):
% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track', 'collection', 'playlist'):
<div class="col-md-12">
<div class="table-card-header">
<div class="header-bar">
@ -812,7 +818,7 @@ DOCUMENTATION :: END
% elif data['media_type'] == 'album':
${data['parent_title']}<br />${data['title']}
% elif data['media_type'] == 'track':
${data['original_title'] or data['grandparent_title']}<br />${data['title']}<br />${data['parent_title']}
${data['grandparent_title']}<br />${data['title']}<br />${data['parent_title']}
% endif
</strong>
</p>
@ -931,13 +937,16 @@ DOCUMENTATION :: END
});
</script>
% endif
% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track'):
% if data['media_type'] in ('movie', 'show', 'season', 'episode', 'artist', 'album', 'track', 'collection', 'playlist'):
<script>
// Populate watch time stats
$.ajax({
url: 'item_watch_time_stats',
async: true,
data: { rating_key: "${data['rating_key']}" },
data: {
rating_key: "${data['rating_key']}",
media_type: "${data['media_type']}"
},
complete: function(xhr, status) {
$("#watch-time-stats").html(xhr.responseText);
}
@ -946,7 +955,10 @@ DOCUMENTATION :: END
$.ajax({
url: 'item_user_stats',
async: true,
data: { rating_key: "${data['rating_key']}" },
data: {
rating_key: "${data['rating_key']}",
media_type: "${data['media_type']}"
},
complete: function(xhr, status) {
$("#user-stats").html(xhr.responseText);
}


@ -160,6 +160,16 @@ DOCUMENTATION :: END
% endif
</div>
</a>
<div class="item-children-instance-text-wrapper poster-item">
<h3>
<a href="${page('info', child['rating_key'])}" title="${child['title']}">${child['title']}</a>
</h3>
% if media_type == 'collection':
<h3 class="text-muted">
<a class="text-muted" href="${page('info', child['parent_rating_key'])}" title="${child['parent_title']}">${child['parent_title']}</a>
</h3>
% endif
</div>
% elif child['media_type'] == 'episode':
<a href="${page('info', child['rating_key'])}" title="Episode ${child['media_index']}">
<div class="item-children-poster">
@ -179,6 +189,29 @@ DOCUMENTATION :: END
<h3>
<a href="${page('info', child['rating_key'])}" title="${child['title']}">${child['title']}</a>
</h3>
% if media_type == 'collection':
<h3 class="text-muted">
<a href="${page('info', child['grandparent_rating_key'])}" title="${child['grandparent_title']}">${child['grandparent_title']}</a>
</h3>
<h3 class="text-muted">
<a href="${page('info', child['parent_rating_key'])}" title="${child['parent_title']}">${short_season(child['parent_title'])}</a>
&middot; <a href="${page('info', child['rating_key'])}" title="Episode ${child['media_index']}">E${child['media_index']}</a>
</h3>
% endif
</div>
% elif child['media_type'] == 'artist':
<a href="${page('info', child['rating_key'])}" title="${child['title']}">
<div class="item-children-poster">
<div class="item-children-poster-face cover-item" style="background-image: url(${page('pms_image_proxy', child['thumb'], child['rating_key'], 300, 300, fallback='cover')});"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
</a>
<div class="item-children-instance-text-wrapper cover-item">
<h3>
<a href="${page('info', child['rating_key'])}" title="${child['title']}">${child['title']}</a>
</h3>
</div>
% elif child['media_type'] == 'album':
<a href="${page('info', child['rating_key'])}" title="${child['title']}">
@ -193,6 +226,11 @@ DOCUMENTATION :: END
<h3>
<a href="${page('info', child['rating_key'])}" title="${child['title']}">${child['title']}</a>
</h3>
% if media_type == 'collection':
<h3 class="text-muted">
<a class="text-muted" href="${page('info', child['parent_rating_key'])}" title="${child['parent_title']}">${child['parent_title']}</a>
</h3>
% endif
</div>
% elif child['media_type'] == 'track':
<% e = 'even' if loop.index % 2 == 0 else 'odd' %>
@ -205,7 +243,15 @@ DOCUMENTATION :: END
${child['title']}
</span>
</a>
% if child['original_title']:
% if media_type == 'collection':
-
<a href="${page('info', child['grandparent_rating_key'])}" title="${child['grandparent_title']}">
<span class="thumb-tooltip" data-toggle="popover" data-img="${page('pms_image_proxy', child['grandparent_thumb'], child['grandparent_rating_key'], 300, 300, fallback='cover')}" data-height="80" data-width="80">
${child['grandparent_title']}
</span>
</a>
<span class="text-muted"> (<a class="no-highlight" href="${page('info', child['parent_rating_key'])}" title="${child['parent_title']}">${child['parent_title']}</a>)</span>
% elif child['original_title']:
<span class="text-muted"> - ${child['original_title']}</span>
% endif
</span>


@ -32,7 +32,12 @@ collections_table_options = {
if (rowData['smart']) {
smart = '<span class="media-type-tooltip" data-toggle="tooltip" title="Smart Collection"><i class="fa fa-cog fa-fw"></i></span>&nbsp;'
}
var thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="' + page('pms_image_proxy', rowData['thumb'], rowData['ratingKey'], 300, 450, null, null, null, 'poster') + '" data-height="120" data-width="80">' + rowData['title'] + '</span>';
var thumb_popover;
if (rowData['subtype'] === 'artist' || rowData['subtype'] === 'album' || rowData['subtype'] === 'track') {
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="' + page('pms_image_proxy', rowData['thumb'], rowData['ratingKey'], 300, 300, null, null, null, 'cover') + '" data-height="80" data-width="80">' + rowData['title'] + '</span>';
} else {
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="' + page('pms_image_proxy', rowData['thumb'], rowData['ratingKey'], 300, 450, null, null, null, 'poster') + '" data-height="120" data-width="80">' + rowData['title'] + '</span>';
}
$(td).html(smart + '<a href="' + page('info', rowData['ratingKey']) + '">' + thumb_popover + '</a>');
}
},


@ -142,8 +142,10 @@
<div class="row">
<div class="col-md-12">
<select class="form-control" id="${item['name']}" name="${item['name']}">
% if item['select_all']:
<option value="select-all">Select All</option>
<option value="remove-all">Remove All</option>
% endif
% if isinstance(item['select_options'], dict):
% for section, options in item['select_options'].items():
<optgroup label="${section}">
@ -153,7 +155,9 @@
</optgroup>
% endfor
% else:
% if item['select_all']:
<option value="border-all"></option>
% endif
% for option in sorted(item['select_options'], key=lambda x: x['text'].lower()):
<option value="${option['value']}">${option['text']}</option>
% endfor


@ -134,8 +134,10 @@
<div class="row">
<div class="col-md-12">
<select class="form-control" id="${item['name']}" name="${item['name']}">
% if item['select_all']:
<option value="select-all">Select All</option>
<option value="remove-all">Remove All</option>
% endif
% if isinstance(item['select_options'], dict):
% for section, options in item['select_options'].items():
<optgroup label="${section}">
@ -145,7 +147,9 @@
</optgroup>
% endfor
% else:
% if item['select_all']:
<option value="border-all"></option>
% endif
% for option in sorted(item['select_options'], key=lambda x: x['text'].lower()):
<option value="${option['value']}">${option['text']}</option>
% endfor
@ -719,6 +723,12 @@
pushoverPriority();
});
var $pushover_sound = $('#pushover_sound').selectize({
create: true
});
var pushover_sound = $pushover_sound[0].selectize;
pushover_sound.setValue(${json.dumps(next((c['value'] for c in notifier['config_options'] if c['name'] == 'pushover_sound'), [])) | n});
% elif notifier['agent_name'] == 'plexmobileapp':
var $plexmobileapp_user_ids = $('#plexmobileapp_user_ids').selectize({
plugins: ['remove_button'],


@ -213,6 +213,20 @@
</div>
<p class="help-block">Set the percentage for a music track to be considered as listened. Minimum 50, Maximum 95.</p>
</div>
<div class="form-group">
<label for="music_watched_percent">Video Watched Completion Behaviour</label>
<div class="row">
<div class="col-md-7">
<select class="form-control" id="watched_marker" name="watched_marker">
<option value="0" ${'selected' if config['watched_marker'] == 0 else ''}>At selected threshold percentage</option>
<option value="1" ${'selected' if config['watched_marker'] == 1 else ''}>At final credits marker position</option>
<option value="2" ${'selected' if config['watched_marker'] == 2 else ''}>At first credits marker position</option>
<option value="3" ${'selected' if config['watched_marker'] == 3 else ''}>Earliest between threshold percent and first credits marker</option>
</select>
</div>
</div>
<p class="help-block">Decide whether to use end credits markers to determine the 'watched' state of video items. When markers are not available the selected threshold percentage will be used.</p>
</div>
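The four options above map to the integer values 0-3 stored in watched_marker. A minimal Python sketch of the selection logic they describe is given below; the helper and its field names (view_offset, duration, credits-marker offsets) are assumptions for illustration, not Tautulli's actual implementation.

# Hypothetical helper illustrating the four "watched_marker" modes described above.
# All names here are assumptions for illustration, not Tautulli's real schema.
def is_watched(view_offset, duration, watched_percent, watched_marker,
               marker_credits_first=None, marker_credits_final=None):
    threshold = duration * watched_percent / 100.0
    if watched_marker == 1 and marker_credits_final is not None:
        return view_offset >= marker_credits_final            # at final credits marker
    if watched_marker == 2 and marker_credits_first is not None:
        return view_offset >= marker_credits_first            # at first credits marker
    if watched_marker == 3 and marker_credits_first is not None:
        return view_offset >= min(threshold, marker_credits_first)  # earliest of the two
    return view_offset >= threshold                           # mode 0, or markers unavailable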
<div class="form-group advanced-setting">
<label>Flush Temporary Sessions</label>
<p class="help-block">


@ -1,121 +0,0 @@
#!/usr/bin/python
###############################################################################
# Formatting filter for urllib2's HTTPHandler(debuglevel=1) output
# Copyright (c) 2013, Analytics Pros
#
# This project is free software, distributed under the BSD license.
# Analytics Pros offers consulting and integration services if your firm needs
# assistance in strategy, implementation, or auditing existing work.
###############################################################################
import sys, re, os
from io import StringIO
class BufferTranslator(object):
""" Provides a buffer-compatible interface for filtering buffer content.
"""
parsers = []
def __init__(self, output):
self.output = output
self.encoding = getattr(output, 'encoding', None)
def write(self, content):
content = self.translate(content)
self.output.write(content)
@staticmethod
def stripslashes(content):
return content.decode('string_escape')
@staticmethod
def addslashes(content):
return content.encode('string_escape')
def translate(self, line):
for pattern, method in self.parsers:
match = pattern.match(line)
if match:
return method(match)
return line
class LineBufferTranslator(BufferTranslator):
""" Line buffer implementation supports translation of line-format input
even when input is not already line-buffered. Caches input until newlines
occur, and then dispatches translated input to output buffer.
"""
def __init__(self, *a, **kw):
self._linepending = []
super(LineBufferTranslator, self).__init__(*a, **kw)
def write(self, _input):
lines = _input.splitlines(True)
for i in range(0, len(lines)):
last = i
if lines[i].endswith('\n'):
prefix = len(self._linepending) and ''.join(self._linepending) or ''
self.output.write(self.translate(prefix + lines[i]))
del self._linepending[0:]
last = -1
if last >= 0:
self._linepending.append(lines[ last ])
def __del__(self):
if len(self._linepending):
self.output.write(self.translate(''.join(self._linepending)))
class HTTPTranslator(LineBufferTranslator):
""" Translates output from |urllib2| HTTPHandler(debuglevel = 1) into
HTTP-compatible, readible text structures for human analysis.
"""
RE_LINE_PARSER = re.compile(r'^(?:([a-z]+):)\s*(\'?)([^\r\n]*)\2(?:[\r\n]*)$')
RE_LINE_BREAK = re.compile(r'(\r?\n|(?:\\r)?\\n)')
RE_HTTP_METHOD = re.compile(r'^(POST|GET|HEAD|DELETE|PUT|TRACE|OPTIONS)')
RE_PARAMETER_SPACER = re.compile(r'&([a-z0-9]+)=')
@classmethod
def spacer(cls, line):
return cls.RE_PARAMETER_SPACER.sub(r' &\1= ', line)
def translate(self, line):
parsed = self.RE_LINE_PARSER.match(line)
if parsed:
value = parsed.group(3)
stage = parsed.group(1)
if stage == 'send': # query string is rendered here
return '\n# HTTP Request:\n' + self.stripslashes(value)
elif stage == 'reply':
return '\n\n# HTTP Response:\n' + self.stripslashes(value)
elif stage == 'header':
return value + '\n'
else:
return value
return line
def consume(outbuffer = None): # Capture standard output
sys.stdout = HTTPTranslator(outbuffer or sys.stdout)
return sys.stdout
if __name__ == '__main__':
consume(sys.stdout).write(sys.stdin.read())
print('\n')
# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4


@ -1,424 +0,0 @@
from future.moves.urllib.request import urlopen, build_opener, install_opener
from future.moves.urllib.request import Request, HTTPSHandler
from future.moves.urllib.error import URLError, HTTPError
from future.moves.urllib.parse import urlencode
import random
import datetime
import time
import uuid
import hashlib
import socket
def generate_uuid(basedata=None):
""" Provides a _random_ UUID with no input, or a UUID4-format MD5 checksum of any input data provided """
if basedata is None:
return str(uuid.uuid4())
elif isinstance(basedata, str):
checksum = hashlib.md5(str(basedata).encode('utf-8')).hexdigest()
return '%8s-%4s-%4s-%4s-%12s' % (
checksum[0:8], checksum[8:12], checksum[12:16], checksum[16:20], checksum[20:32])
class Time(datetime.datetime):
""" Wrappers and convenience methods for processing various time representations """
@classmethod
def from_unix(cls, seconds, milliseconds=0):
""" Produce a full |datetime.datetime| object from a Unix timestamp """
base = list(time.gmtime(seconds))[0:6]
base.append(milliseconds * 1000) # microseconds
return cls(*base)
@classmethod
def to_unix(cls, timestamp):
""" Wrapper over time module to produce Unix epoch time as a float """
if not isinstance(timestamp, datetime.datetime):
raise TypeError('Time.milliseconds expects a datetime object')
base = time.mktime(timestamp.timetuple())
return base
@classmethod
def milliseconds_offset(cls, timestamp, now=None):
""" Offset time (in milliseconds) from a |datetime.datetime| object to now """
if isinstance(timestamp, (int, float)):
base = timestamp
else:
base = cls.to_unix(timestamp)
base = base + (timestamp.microsecond / 1000000)
if now is None:
now = time.time()
return (now - base) * 1000
class HTTPRequest(object):
""" URL Construction and request handling abstraction.
This is not intended to be used outside this module.
Automates mapping of persistent state (i.e. query parameters)
onto transcient datasets for each query.
"""
endpoint = 'https://www.google-analytics.com/collect'
@staticmethod
def debug():
""" Activate debugging on urllib2 """
handler = HTTPSHandler(debuglevel=1)
opener = build_opener(handler)
install_opener(opener)
# Store properties for all requests
def __init__(self, user_agent=None, *args, **opts):
self.user_agent = user_agent or 'Analytics Pros - Universal Analytics (Python)'
@classmethod
def fixUTF8(cls, data): # Ensure proper encoding for UA's servers...
""" Convert all strings to UTF-8 """
for key in data:
if isinstance(data[key], str):
data[key] = data[key].encode('utf-8')
return data
# Apply stored properties to the given dataset & POST to the configured endpoint
def send(self, data):
request = Request(
self.endpoint + '?' + urlencode(self.fixUTF8(data)).encode('utf-8'),
headers={
'User-Agent': self.user_agent
}
)
self.open(request)
def open(self, request):
try:
return urlopen(request)
except HTTPError as e:
return False
except URLError as e:
self.cache_request(request)
return False
def cache_request(self, request):
# TODO: implement a proper caching mechanism here for re-transmitting hits
# record = (Time.now(), request.get_full_url(), request.get_data(), request.headers)
pass
class HTTPPost(HTTPRequest):
# Apply stored properties to the given dataset & POST to the configured endpoint
def send(self, data):
request = Request(
self.endpoint,
data=urlencode(self.fixUTF8(data)).encode('utf-8'),
headers={
'User-Agent': self.user_agent
}
)
self.open(request)
class Tracker(object):
""" Primary tracking interface for Universal Analytics """
params = None
parameter_alias = {}
valid_hittypes = ('pageview', 'event', 'social', 'screenview', 'transaction', 'item', 'exception', 'timing')
@classmethod
def alias(cls, typemap, base, *names):
""" Declare an alternate (humane) name for a measurement protocol parameter """
cls.parameter_alias[base] = (typemap, base)
for i in names:
cls.parameter_alias[i] = (typemap, base)
@classmethod
def coerceParameter(cls, name, value=None):
if isinstance(name, str) and name[0] == '&':
return name[1:], str(value)
elif name in cls.parameter_alias:
typecast, param_name = cls.parameter_alias.get(name)
return param_name, typecast(value)
else:
raise KeyError('Parameter "{0}" is not recognized'.format(name))
def payload(self, data):
for key, value in data.items():
try:
yield self.coerceParameter(key, value)
except KeyError:
continue
option_sequence = {
'pageview': [(str, 'dp')],
'event': [(str, 'ec'), (str, 'ea'), (str, 'el'), (int, 'ev')],
'social': [(str, 'sn'), (str, 'sa'), (str, 'st')],
'timing': [(str, 'utc'), (str, 'utv'), (str, 'utt'), (str, 'utl')]
}
@classmethod
def consume_options(cls, data, hittype, args):
""" Interpret sequential arguments related to known hittypes based on declared structures """
opt_position = 0
data['t'] = hittype # integrate hit type parameter
if hittype in cls.option_sequence:
for expected_type, optname in cls.option_sequence[hittype]:
if opt_position < len(args) and isinstance(args[opt_position], expected_type):
data[optname] = args[opt_position]
opt_position += 1
@classmethod
def hittime(cls, timestamp=None, age=None, milliseconds=None):
""" Returns an integer represeting the milliseconds offset for a given hit (relative to now) """
if isinstance(timestamp, (int, float)):
return int(Time.milliseconds_offset(Time.from_unix(timestamp, milliseconds=milliseconds)))
if isinstance(timestamp, datetime.datetime):
return int(Time.milliseconds_offset(timestamp))
if isinstance(age, (int, float)):
return int(age * 1000) + (milliseconds or 0)
@property
def account(self):
return self.params.get('tid', None)
def __init__(self, account, name=None, client_id=None, hash_client_id=False, user_id=None, user_agent=None,
use_post=True):
if use_post is False:
self.http = HTTPRequest(user_agent=user_agent)
else:
self.http = HTTPPost(user_agent=user_agent)
self.params = {'v': 1, 'tid': account}
if client_id is None:
client_id = generate_uuid()
self.params['cid'] = client_id
self.hash_client_id = hash_client_id
if user_id is not None:
self.params['uid'] = user_id
def set_timestamp(self, data):
""" Interpret time-related options, apply queue-time parameter as needed """
if 'hittime' in data: # an absolute timestamp
data['qt'] = self.hittime(timestamp=data.pop('hittime', None))
if 'hitage' in data: # a relative age (in seconds)
data['qt'] = self.hittime(age=data.pop('hitage', None))
def send(self, hittype, *args, **data):
""" Transmit HTTP requests to Google Analytics using the measurement protocol """
if hittype not in self.valid_hittypes:
raise KeyError('Unsupported Universal Analytics Hit Type: {0}'.format(repr(hittype)))
self.set_timestamp(data)
self.consume_options(data, hittype, args)
for item in args: # process dictionary-object arguments of transcient data
if isinstance(item, dict):
for key, val in self.payload(item):
data[key] = val
for k, v in self.params.items(): # update only absent parameters
if k not in data:
data[k] = v
data = dict(self.payload(data))
if self.hash_client_id:
data['cid'] = generate_uuid(data['cid'])
# Transmit the hit to Google...
self.http.send(data)
# Setting persistent attibutes of the session/hit/etc (inc. custom dimensions/metrics)
def set(self, name, value=None):
if isinstance(name, dict):
for key, value in name.items():
try:
param, value = self.coerceParameter(key, value)
self.params[param] = value
except KeyError:
pass
elif isinstance(name, str):
try:
param, value = self.coerceParameter(name, value)
self.params[param] = value
except KeyError:
pass
def __getitem__(self, name):
param, value = self.coerceParameter(name, None)
return self.params.get(param, None)
def __setitem__(self, name, value):
param, value = self.coerceParameter(name, value)
self.params[param] = value
def __delitem__(self, name):
param, value = self.coerceParameter(name, None)
if param in self.params:
del self.params[param]
def safe_unicode(obj):
""" Safe convertion to the Unicode string version of the object """
try:
return str(obj)
except UnicodeDecodeError:
return obj.decode('utf-8')
# Declaring name mappings for Measurement Protocol parameters
MAX_CUSTOM_DEFINITIONS = 200
MAX_EC_LISTS = 11 # 1-based index
MAX_EC_PRODUCTS = 11 # 1-based index
MAX_EC_PROMOTIONS = 11 # 1-based index
Tracker.alias(int, 'v', 'protocol-version')
Tracker.alias(safe_unicode, 'cid', 'client-id', 'clientId', 'clientid')
Tracker.alias(safe_unicode, 'tid', 'trackingId', 'account')
Tracker.alias(safe_unicode, 'uid', 'user-id', 'userId', 'userid')
Tracker.alias(safe_unicode, 'uip', 'user-ip', 'userIp', 'ipaddr')
Tracker.alias(safe_unicode, 'ua', 'userAgent', 'userAgentOverride', 'user-agent')
Tracker.alias(safe_unicode, 'dp', 'page', 'path')
Tracker.alias(safe_unicode, 'dt', 'title', 'pagetitle', 'pageTitle' 'page-title')
Tracker.alias(safe_unicode, 'dl', 'location')
Tracker.alias(safe_unicode, 'dh', 'hostname')
Tracker.alias(safe_unicode, 'sc', 'sessioncontrol', 'session-control', 'sessionControl')
Tracker.alias(safe_unicode, 'dr', 'referrer', 'referer')
Tracker.alias(int, 'qt', 'queueTime', 'queue-time')
Tracker.alias(safe_unicode, 't', 'hitType', 'hittype')
Tracker.alias(int, 'aip', 'anonymizeIp', 'anonIp', 'anonymize-ip')
Tracker.alias(safe_unicode, 'ds', 'dataSource', 'data-source')
# Campaign attribution
Tracker.alias(safe_unicode, 'cn', 'campaign', 'campaignName', 'campaign-name')
Tracker.alias(safe_unicode, 'cs', 'source', 'campaignSource', 'campaign-source')
Tracker.alias(safe_unicode, 'cm', 'medium', 'campaignMedium', 'campaign-medium')
Tracker.alias(safe_unicode, 'ck', 'keyword', 'campaignKeyword', 'campaign-keyword')
Tracker.alias(safe_unicode, 'cc', 'content', 'campaignContent', 'campaign-content')
Tracker.alias(safe_unicode, 'ci', 'campaignId', 'campaignID', 'campaign-id')
# Technical specs
Tracker.alias(safe_unicode, 'sr', 'screenResolution', 'screen-resolution', 'resolution')
Tracker.alias(safe_unicode, 'vp', 'viewport', 'viewportSize', 'viewport-size')
Tracker.alias(safe_unicode, 'de', 'encoding', 'documentEncoding', 'document-encoding')
Tracker.alias(int, 'sd', 'colors', 'screenColors', 'screen-colors')
Tracker.alias(safe_unicode, 'ul', 'language', 'user-language', 'userLanguage')
# Mobile app
Tracker.alias(safe_unicode, 'an', 'appName', 'app-name', 'app')
Tracker.alias(safe_unicode, 'cd', 'contentDescription', 'screenName', 'screen-name', 'content-description')
Tracker.alias(safe_unicode, 'av', 'appVersion', 'app-version', 'version')
Tracker.alias(safe_unicode, 'aid', 'appID', 'appId', 'application-id', 'app-id', 'applicationId')
Tracker.alias(safe_unicode, 'aiid', 'appInstallerId', 'app-installer-id')
# Ecommerce
Tracker.alias(safe_unicode, 'ta', 'affiliation', 'transactionAffiliation', 'transaction-affiliation')
Tracker.alias(safe_unicode, 'ti', 'transaction', 'transactionId', 'transaction-id')
Tracker.alias(float, 'tr', 'revenue', 'transactionRevenue', 'transaction-revenue')
Tracker.alias(float, 'ts', 'shipping', 'transactionShipping', 'transaction-shipping')
Tracker.alias(float, 'tt', 'tax', 'transactionTax', 'transaction-tax')
Tracker.alias(safe_unicode, 'cu', 'currency', 'transactionCurrency',
'transaction-currency') # Currency code, e.g. USD, EUR
Tracker.alias(safe_unicode, 'in', 'item-name', 'itemName')
Tracker.alias(float, 'ip', 'item-price', 'itemPrice')
Tracker.alias(float, 'iq', 'item-quantity', 'itemQuantity')
Tracker.alias(safe_unicode, 'ic', 'item-code', 'sku', 'itemCode')
Tracker.alias(safe_unicode, 'iv', 'item-variation', 'item-category', 'itemCategory', 'itemVariation')
# Events
Tracker.alias(safe_unicode, 'ec', 'event-category', 'eventCategory', 'category')
Tracker.alias(safe_unicode, 'ea', 'event-action', 'eventAction', 'action')
Tracker.alias(safe_unicode, 'el', 'event-label', 'eventLabel', 'label')
Tracker.alias(int, 'ev', 'event-value', 'eventValue', 'value')
Tracker.alias(int, 'ni', 'noninteractive', 'nonInteractive', 'noninteraction', 'nonInteraction')
# Social
Tracker.alias(safe_unicode, 'sa', 'social-action', 'socialAction')
Tracker.alias(safe_unicode, 'sn', 'social-network', 'socialNetwork')
Tracker.alias(safe_unicode, 'st', 'social-target', 'socialTarget')
# Exceptions
Tracker.alias(safe_unicode, 'exd', 'exception-description', 'exceptionDescription', 'exDescription')
Tracker.alias(int, 'exf', 'exception-fatal', 'exceptionFatal', 'exFatal')
# User Timing
Tracker.alias(safe_unicode, 'utc', 'timingCategory', 'timing-category')
Tracker.alias(safe_unicode, 'utv', 'timingVariable', 'timing-variable')
Tracker.alias(float, 'utt', 'time', 'timingTime', 'timing-time')
Tracker.alias(safe_unicode, 'utl', 'timingLabel', 'timing-label')
Tracker.alias(float, 'dns', 'timingDNS', 'timing-dns')
Tracker.alias(float, 'pdt', 'timingPageLoad', 'timing-page-load')
Tracker.alias(float, 'rrt', 'timingRedirect', 'timing-redirect')
Tracker.alias(safe_unicode, 'tcp', 'timingTCPConnect', 'timing-tcp-connect')
Tracker.alias(safe_unicode, 'srt', 'timingServerResponse', 'timing-server-response')
# Custom dimensions and metrics
for i in range(0, 200):
Tracker.alias(safe_unicode, 'cd{0}'.format(i), 'dimension{0}'.format(i))
Tracker.alias(int, 'cm{0}'.format(i), 'metric{0}'.format(i))
# Content groups
for i in range(0, 5):
Tracker.alias(safe_unicode, 'cg{0}'.format(i), 'contentGroup{0}'.format(i))
# Enhanced Ecommerce
Tracker.alias(str, 'pa') # Product action
Tracker.alias(str, 'tcc') # Coupon code
Tracker.alias(str, 'pal') # Product action list
Tracker.alias(int, 'cos') # Checkout step
Tracker.alias(str, 'col') # Checkout step option
Tracker.alias(str, 'promoa') # Promotion action
for product_index in range(1, MAX_EC_PRODUCTS):
Tracker.alias(str, 'pr{0}id'.format(product_index)) # Product SKU
Tracker.alias(str, 'pr{0}nm'.format(product_index)) # Product name
Tracker.alias(str, 'pr{0}br'.format(product_index)) # Product brand
Tracker.alias(str, 'pr{0}ca'.format(product_index)) # Product category
Tracker.alias(str, 'pr{0}va'.format(product_index)) # Product variant
Tracker.alias(str, 'pr{0}pr'.format(product_index)) # Product price
Tracker.alias(int, 'pr{0}qt'.format(product_index)) # Product quantity
Tracker.alias(str, 'pr{0}cc'.format(product_index)) # Product coupon code
Tracker.alias(int, 'pr{0}ps'.format(product_index)) # Product position
for custom_index in range(MAX_CUSTOM_DEFINITIONS):
Tracker.alias(str, 'pr{0}cd{1}'.format(product_index, custom_index)) # Product custom dimension
Tracker.alias(int, 'pr{0}cm{1}'.format(product_index, custom_index)) # Product custom metric
for list_index in range(1, MAX_EC_LISTS):
Tracker.alias(str, 'il{0}pi{1}id'.format(list_index, product_index)) # Product impression SKU
Tracker.alias(str, 'il{0}pi{1}nm'.format(list_index, product_index)) # Product impression name
Tracker.alias(str, 'il{0}pi{1}br'.format(list_index, product_index)) # Product impression brand
Tracker.alias(str, 'il{0}pi{1}ca'.format(list_index, product_index)) # Product impression category
Tracker.alias(str, 'il{0}pi{1}va'.format(list_index, product_index)) # Product impression variant
Tracker.alias(int, 'il{0}pi{1}ps'.format(list_index, product_index)) # Product impression position
Tracker.alias(int, 'il{0}pi{1}pr'.format(list_index, product_index)) # Product impression price
for custom_index in range(MAX_CUSTOM_DEFINITIONS):
Tracker.alias(str, 'il{0}pi{1}cd{2}'.format(list_index, product_index,
custom_index)) # Product impression custom dimension
Tracker.alias(int, 'il{0}pi{1}cm{2}'.format(list_index, product_index,
custom_index)) # Product impression custom metric
for list_index in range(1, MAX_EC_LISTS):
Tracker.alias(str, 'il{0}nm'.format(list_index)) # Product impression list name
for promotion_index in range(1, MAX_EC_PROMOTIONS):
Tracker.alias(str, 'promo{0}id'.format(promotion_index)) # Promotion ID
Tracker.alias(str, 'promo{0}nm'.format(promotion_index)) # Promotion name
Tracker.alias(str, 'promo{0}cr'.format(promotion_index)) # Promotion creative
Tracker.alias(str, 'promo{0}ps'.format(promotion_index)) # Promotion position
# Shortcut for creating trackers
def create(account, *args, **kwargs):
return Tracker(account, *args, **kwargs)
# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4


@ -1 +0,0 @@
from . import Tracker

lib/ga4mp/__init__.py (new file, +3)

@ -0,0 +1,3 @@
from ga4mp.ga4mp import GtagMP, FirebaseMP
__all__ = ['GtagMP','FirebaseMP']

lib/ga4mp/event.py (new file, +44)

@ -0,0 +1,44 @@
from ga4mp.item import Item
class Event(dict):
def __init__(self, name):
self.set_event_name(name)
def set_event_name(self, name):
if len(name) > 40:
raise ValueError("Event name cannot exceed 40 characters.")
self["name"] = name
def get_event_name(self):
return self.get("name")
def set_event_param(self, name, value):
# Series of checks to comply with GA4 event collection limits: https://support.google.com/analytics/answer/9267744
if len(name) > 40:
raise ValueError("Event parameter name cannot exceed 40 characters.")
if name in ["page_location", "page_referrer", "page_title"] and len(str(value)) > 300:
raise ValueError("Event parameter value for page info cannot exceed 300 characters.")
if name not in ["page_location", "page_referrer", "page_title"] and len(str(value)) > 100:
raise ValueError("Event parameter value cannot exceed 100 characters.")
if "params" not in self.keys():
self["params"] = {}
if len(self["params"]) >= 100:
raise RuntimeError("Event cannot contain more than 100 parameters.")
self["params"][name] = value
def get_event_params(self):
return self.get("params")
def delete_event_param(self, name):
# Since only 25 event parameters are allowed, this will allow the user to delete a parameter if necessary.
self["params"].pop(name, None)
def create_new_item(self, item_id=None, item_name=None):
return Item(item_id=item_id, item_name=item_name)
def add_item_to_event(self, item):
if not isinstance(item, dict):
raise ValueError("'item' must be an instance of a dictionary.")
if "items" not in self["params"].keys():
self.set_event_param("items", [])
self["params"]["items"].append(item)

lib/ga4mp/ga4mp.py (new file, +416)

@ -0,0 +1,416 @@
###############################################################################
# Google Analytics 4 Measurement Protocol for Python
# Copyright (c) 2022, Adswerve
#
# This project is free software, distributed under the BSD license.
# Adswerve offers consulting and integration services if your firm needs
# assistance in strategy, implementation, or auditing existing work.
###############################################################################
import json
import logging
import urllib.request
import time
import datetime
import random
from ga4mp.utils import params_dict
from ga4mp.event import Event
from ga4mp.store import BaseStore, DictStore
import os, sys
sys.path.append(
os.path.normpath(os.path.join(os.path.dirname(__file__), ".."))
)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class BaseGa4mp(object):
"""
Parent class that provides an interface for sending data to Google Analytics, supporting the GA4 Measurement Protocol.
Parameters
----------
api_secret : string
Generated through the Google Analytics UI. To create a new secret, navigate in the Google Analytics UI to: Admin > Data Streams >
[choose your stream] > Measurement Protocol API Secrets > Create
See Also
--------
* Measurement Protocol (Google Analytics 4): https://developers.google.com/analytics/devguides/collection/protocol/ga4
Examples
--------
# Initialize tracking object for gtag usage
>>> ga = GtagMP(api_secret = "API_SECRET", measurement_id = "MEASUREMENT_ID", client_id="CLIENT_ID")
# Initialize tracking object for Firebase usage
>>> ga = FirebaseMP(api_secret = "API_SECRET", firebase_app_id = "FIREBASE_APP_ID", app_instance_id="APP_INSTANCE_ID")
# Build an event
>>> event_type = 'new_custom_event'
>>> event_parameters = {'parameter_key_1': 'parameter_1', 'parameter_key_2': 'parameter_2'}
>>> event = {'name': event_type, 'params': event_parameters }
>>> events = [event]
# Send a custom event to GA4 immediately
>>> ga.send(events)
# Postponed send of a custom event to GA4
>>> ga.send(events, postpone=True)
>>> ga.postponed_send()
"""
def __init__(self, api_secret, store: BaseStore = None):
self._initialization_time = time.time() # used for both session_id and calculating engagement time
self.api_secret = api_secret
self._event_list = []
assert store is None or isinstance(store, BaseStore), "if supplied, store must be an instance of BaseStore"
self.store = store or DictStore()
self._check_store_requirements()
self._base_domain = "https://www.google-analytics.com/mp/collect"
self._validation_domain = "https://www.google-analytics.com/debug/mp/collect"
def _check_store_requirements(self):
# Store must contain "session_id" and "last_interaction_time_msec" in order for tracking to work properly.
if self.store.get_session_parameter("session_id") is None:
self.store.set_session_parameter(name="session_id", value=int(self._initialization_time))
# Note: "last_interaction_time_msec" factors into the required "engagement_time_msec" event parameter.
self.store.set_session_parameter(name="last_interaction_time_msec", value=int(self._initialization_time * 1000))
def create_new_event(self, name):
return Event(name=name)
def send(self, events, validation_hit=False, postpone=False, date=None):
"""
Method to send an http post request to google analytics with the specified events.
Parameters
----------
events : List[Dict]
A list of dictionaries of the events to be sent to Google Analytics. The list of dictionaries should adhere
to the following format:
[{'name': 'level_end',
'params' : {'level_name': 'First',
'success': 'True'}
},
{'name': 'level_up',
'params': {'character': 'John Madden',
'level': 'First'}
}]
validation_hit : bool, optional
Boolean to depict if events should be tested against the Measurement Protocol Validation Server, by default False
postpone : bool, optional
Boolean to depict if provided event list should be postponed, by default False
date : datetime
Python datetime object for sending a historical event at the given date. Date cannot be in the future.
"""
# check for any missing or invalid parameters among automatically collected and recommended event types
self._check_params(events)
self._check_date_not_in_future(date)
self._add_session_id_and_engagement_time(events)
if postpone is True:
# build event list to send later
for event in events:
event["_timestamp_micros"] = self._get_timestamp(time.time())
self._event_list.append(event)
else:
# batch events into sets of 25 events, the maximum allowed.
batched_event_list = [
events[event : event + 25] for event in range(0, len(events), 25)
]
# send http post request
self._http_post(
batched_event_list, validation_hit=validation_hit, date=date
)
def postponed_send(self):
"""
Method to send the events provided to Ga4mp.send(events,postpone=True)
"""
for event in self._event_list:
self._http_post([event], postpone=True)
# clear event_list for future use
self._event_list = []
def append_event_to_params_dict(self, new_name_and_parameters):
"""
Method to append event name and parameters key-value pairing(s) to parameters dictionary.
Parameters
----------
new_name_and_parameters : Dict
A dictionary with one key-value pair representing a new type of event to be sent to Google Analytics.
The dictionary should adhere to the following format:
{'new_name': ['new_param_1', 'new_param_2', 'new_param_3']}
"""
params_dict.update(new_name_and_parameters)
def _http_post(self, batched_event_list, validation_hit=False, postpone=False, date=None):
"""
Method to send http POST request to google-analytics.
Parameters
----------
batched_event_list : List[List[Dict]]
List of List of events. Places initial event payload into a list to send http POST in batches.
validation_hit : bool, optional
Boolean to depict if events should be tested against the Measurement Protocol Validation Server, by default False
postpone : bool, optional
Boolean to depict if provided event list should be postponed, by default False
date : datetime
Python datetime object for sending a historical event at the given date. Date cannot be in the future.
Timestamp micros supports up to 48 hours of backdating.
If date is specified, postpone must be False or an assertion will be thrown.
"""
self._check_date_not_in_future(date)
status_code = None # Default set to know if batch loop does not work and to bound status_code
# set domain
domain = self._base_domain
if validation_hit is True:
domain = self._validation_domain
logger.info(f"Sending POST to: {domain}")
# loop through events in batches of 25
batch_number = 1
for batch in batched_event_list:
# url and request slightly differ by subclass
url = self._build_url(domain=domain)
request = self._build_request(batch=batch)
self._add_user_props_to_hit(request)
# make adjustments for postponed hit
request["events"] = (
{"name": batch["name"], "params": batch["params"]}
if (postpone)
else batch
)
if date is not None:
logger.info(f"Setting event timestamp to: {date}")
assert (
postpone is False
), "Cannot send postponed historical hit, ensure postpone=False"
ts = self._datetime_to_timestamp(date)
ts_micro = self._get_timestamp(ts)
request["timestamp_micros"] = int(ts_micro)
logger.info(f"Timestamp of request is: {request['timestamp_micros']}")
if postpone:
# add timestamp to hit
request["timestamp_micros"] = batch["_timestamp_micros"]
req = urllib.request.Request(url)
req.add_header("Content-Type", "application/json; charset=utf-8")
jsondata = json.dumps(request)
json_data_as_bytes = jsondata.encode("utf-8") # needs to be bytes
req.add_header("Content-Length", len(json_data_as_bytes))
result = urllib.request.urlopen(req, json_data_as_bytes)
status_code = result.status
logger.info(f"Batch Number: {batch_number}")
logger.info(f"Status code: {status_code}")
batch_number += 1
return status_code
def _check_params(self, events):
"""
Method to check whether the provided event payload parameters align with supported parameters.
Parameters
----------
events : List[Dict]
A list of dictionaries of the events to be sent to Google Analytics. The list of dictionaries should adhere
to the following format:
[{'name': 'level_end',
'params' : {'level_name': 'First',
'success': 'True'}
},
{'name': 'level_up',
'params': {'character': 'John Madden',
'level': 'First'}
}]
"""
# check to make sure it's a list of dictionaries with the right keys
assert type(events) == list, "events should be a list"
for event in events:
assert isinstance(event, dict), "each event should be an instance of a dictionary"
assert "name" in event, 'each event should have a "name" key'
assert "params" in event, 'each event should have a "params" key'
# check for any missing or invalid parameters
for e in events:
event_name = e["name"]
event_params = e["params"]
if event_name in params_dict.keys():
for parameter in params_dict[event_name]:
if parameter not in event_params.keys():
logger.warning(
f"WARNING: Event parameters do not match event type.\nFor {event_name} event type, the correct parameter(s) are {params_dict[event_name]}.\nThe parameter '{parameter}' triggered this warning.\nFor a breakdown of currently supported event types and their parameters go here: https://support.google.com/analytics/answer/9267735\n"
)
def _add_session_id_and_engagement_time(self, events):
"""
Method to add the session_id and engagement_time_msec parameter to all events.
"""
for event in events:
current_time_in_milliseconds = int(time.time() * 1000)
event_params = event["params"]
if "session_id" not in event_params.keys():
event_params["session_id"] = self.store.get_session_parameter("session_id")
if "engagement_time_msec" not in event_params.keys():
last_interaction_time = self.store.get_session_parameter("last_interaction_time_msec")
event_params["engagement_time_msec"] = current_time_in_milliseconds - last_interaction_time if current_time_in_milliseconds > last_interaction_time else 0
self.store.set_session_parameter(name="last_interaction_time_msec", value=current_time_in_milliseconds)
def _add_user_props_to_hit(self, hit):
"""
Method is a helper function to add user properties to outgoing hits.
Parameters
----------
hit : dict
"""
for key in self.store.get_all_user_properties():
try:
if key in ["user_id", "non_personalized_ads"]:
hit.update({key: self.store.get_user_property(key)})
else:
if "user_properties" not in hit.keys():
hit.update({"user_properties": {}})
hit["user_properties"].update(
{key: {"value": self.store.get_user_property(key)}}
)
except:
logger.info(f"Failed to add user property to outgoing hit: {key}")
def _get_timestamp(self, timestamp):
"""
Method returns UNIX timestamp in microseconds for postponed hits.
Parameters
----------
None
"""
return int(timestamp * 1e6)
def _datetime_to_timestamp(self, dt):
"""
Private method to convert a datetime object into a timestamp
Parameters
----------
dt : datetime
A datetime object in any format
Returns
-------
timestamp
A UNIX timestamp in milliseconds
"""
return time.mktime(dt.timetuple())
def _check_date_not_in_future(self, date):
"""
Method to check that provided date is not in the future.
Parameters
----------
date : datetime
Python datetime object
"""
if date is None:
pass
else:
assert (
date <= datetime.datetime.now()
), "Provided date cannot be in the future"
def _build_url(self, domain):
raise NotImplementedError("Subclass should be using this function, but it was called through the base class instead.")
def _build_request(self, batch):
raise NotImplementedError("Subclass should be using this function, but it was called through the base class instead.")
class GtagMP(BaseGa4mp):
"""
Subclass for users of gtag. See `Ga4mp` parent class for examples.
Parameters
----------
measurement_id : string
The identifier for a Data Stream. Found in the Google Analytics UI under: Admin > Data Streams > [choose your stream] > Measurement ID (top-right)
client_id : string
A unique identifier for a client, representing a specific browser/device.
"""
def __init__(self, api_secret, measurement_id, client_id,):
super().__init__(api_secret)
self.measurement_id = measurement_id
self.client_id = client_id
def _build_url(self, domain):
return f"{domain}?measurement_id={self.measurement_id}&api_secret={self.api_secret}"
def _build_request(self, batch):
return {"client_id": self.client_id, "events": batch}
def random_client_id(self):
"""
Utility function for generating a new client ID matching the typical format of 10 random digits and the UNIX timestamp in seconds, joined by a period.
"""
return "%0.10d" % random.randint(0,9999999999) + "." + str(int(time.time()))
class FirebaseMP(BaseGa4mp):
"""
Subclass for users of Firebase. See `Ga4mp` parent class for examples.
Parameters
----------
firebase_app_id : string
The identifier for a Firebase app. Found in the Firebase console under: Project Settings > General > Your Apps > App ID.
app_instance_id : string
A unique identifier for a Firebase app instance.
* Android - getAppInstanceId() - https://firebase.google.com/docs/reference/android/com/google/firebase/analytics/FirebaseAnalytics#public-taskstring-getappinstanceid
* Kotlin - getAppInstanceId() - https://firebase.google.com/docs/reference/kotlin/com/google/firebase/analytics/FirebaseAnalytics#getappinstanceid
* Swift - appInstanceID() - https://firebase.google.com/docs/reference/swift/firebaseanalytics/api/reference/Classes/Analytics#appinstanceid
* Objective-C - appInstanceID - https://firebase.google.com/docs/reference/ios/firebaseanalytics/api/reference/Classes/FIRAnalytics#+appinstanceid
* C++ - GetAnalyticsInstanceId() - https://firebase.google.com/docs/reference/cpp/namespace/firebase/analytics#getanalyticsinstanceid
* Unity - GetAnalyticsInstanceIdAsync() - https://firebase.google.com/docs/reference/unity/class/firebase/analytics/firebase-analytics#getanalyticsinstanceidasync
"""
def __init__(self, api_secret, firebase_app_id, app_instance_id):
super().__init__(api_secret)
self.firebase_app_id = firebase_app_id
self.app_instance_id = app_instance_id
def _build_url(self, domain):
return f"{domain}?firebase_app_id={self.firebase_app_id}&api_secret={self.api_secret}"
def _build_request(self, batch):
return {"app_instance_id": self.app_instance_id, "events": batch}

lib/ga4mp/item.py (new file, +11)

@ -0,0 +1,11 @@
class Item(dict):
def __init__(self, item_id=None, item_name=None):
if item_id is None and item_name is None:
raise ValueError("At least one of 'item_id' and 'item_name' is required.")
if item_id is not None:
self.set_parameter("item_id", str(item_id))
if item_name is not None:
self.set_parameter("item_name", item_name)
def set_parameter(self, name, value):
self[name] = value
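To show how these pieces fit together, a short usage sketch built only from the Event and Item methods added above (the event name, parameter values, and item fields are made up):

from ga4mp.event import Event

# Build a GA4 event, attach parameters, then attach an ecommerce item.
event = Event(name="level_end")
event.set_event_param("level_name", "First")
event.set_event_param("success", "True")

item = event.create_new_item(item_id="SKU_123", item_name="Example Item")  # returns an Item
item.set_parameter("price", 9.99)
event.add_item_to_event(item)

print(event.get_event_name())    # 'level_end'
print(event.get_event_params())  # {'level_name': 'First', 'success': 'True', 'items': [{...}]}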

lib/ga4mp/store.py (new file, +116)

@ -0,0 +1,116 @@
import json
import logging
from pathlib import Path
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class BaseStore(dict):
def __init__(self):
self.update([("user_properties", {}),("session_parameters", {})])
def save(self):
raise NotImplementedError("Subclass should be using this function, but it was called through the base class instead.")
def _check_exists(self, key):
# Helper function to make sure a key exists before trying to work with values within it.
if key not in self.keys():
self[key] = {}
def _set(self, param_type, name, value):
# Helper function to set a single parameter (user or session or other).
self._check_exists(key=param_type)
self[param_type][name] = value
def _get_one(self, param_type, name):
# Helper function to get a single parameter value (user or session).
self._check_exists(key=param_type)
return self[param_type].get(name, None)
def _get_all(self, param_type=None):
# Helper function to get all user or session parameters - or the entire dictionary if not specified.
if param_type is not None:
return self[param_type]
else:
return self
# While redundant, the following make sure the distinction between session and user items is easier for the end user.
def set_user_property(self, name, value):
self._set(param_type="user_properties", name=name, value=value)
def get_user_property(self, name):
return self._get_one(param_type="user_properties", name=name)
def get_all_user_properties(self):
return self._get_all(param_type="user_properties")
def clear_user_properties(self):
self["user_properties"] = {}
def set_session_parameter(self, name, value):
self._set(param_type="session_parameters", name=name, value=value)
def get_session_parameter(self, name):
return self._get_one(param_type="session_parameters", name=name)
def get_all_session_parameters(self):
return self._get_all(param_type="session_parameters")
def clear_session_parameters(self):
self["session_parameters"] = {}
# Similar functions for other items the user wants to store that don't fit the other two categories.
def set_other_parameter(self, name, value):
self._set(param_type="other", name=name, value=value)
def get_other_parameter(self, name):
return self._get_one(param_type="other", name=name)
def get_all_other_parameters(self):
return self._get_all(param_type="other")
def clear_other_parameters(self):
self["other"] = {}
class DictStore(BaseStore):
# Class for working with dictionaries that persist for the life of the class.
def __init__(self, data: dict = None):
super().__init__()
if data:
self.update(data)
def save(self):
# Give the user back what's in the dictionary so they can decide how to save it.
return self._get_all()
class FileStore(BaseStore):
# Class for working with dictionaries that get saved to a JSON file.
def __init__(self, data_location: str = None):
super().__init__()
self.data_location = data_location
try:
self._load_file()
except:
logger.info(f"Failed to find file at location: {data_location}")
def _load_file(self):
# Function to get data from the object's initialized location.
# If the provided or stored data_location exists, read the file and overwrite the object's contents.
if Path(self.data_location).exists():
with open(self.data_location, "r") as json_file:
self.update(json.load(json_file))
# If the data_location doesn't exist, try to create a new starter JSON file at the location given.
else:
starter_dict = '{"user_properties":{}, "session_parameters":{}}'
starter_json = json.loads(starter_dict)
Path(self.data_location).touch()
with open(self.data_location, "w") as json_file:
json.dump(starter_json, json_file)
def save(self):
# Function to save the current dictionary to a JSON file at the object's initialized location.
try:
with open(self.data_location, "w") as outfile:
json.dump(self, outfile)
except:
logger.info(f"Failed to save file at location: {self.data_location}")

lib/ga4mp/utils.py (new file, +392)

@ -0,0 +1,392 @@
# all automatically collected and recommended event types
params_dict = {
"ad_click": [
"ad_event_id"
],
"ad_exposure": [
"firebase_screen",
"firebase_screen_id",
"firebase_screen_class",
"exposure_time",
],
"ad_impression": [
"ad_event_id"
],
"ad_query": [
"ad_event_id"
],
"ad_reward": [
"ad_unit_id",
"reward_type",
"reward_value"
],
"add_payment_info": [
"coupon",
"currency",
"items",
"payment_type",
"value"
],
"add_shipping_info": [
"coupon",
"currency",
"items",
"shipping_tier",
"value"
],
"add_to_cart": [
"currency",
"items",
"value"
],
"add_to_wishlist": [
"currency",
"items",
"value"
],
"adunit_exposure": [
"firebase_screen",
"firebase_screen_id",
"firebase_screen_class",
"exposure_time",
],
"app_clear_data": [],
"app_exception": [
"fatal",
"timestamp",
"engagement_time_msec"
],
"app_remove": [],
"app_store_refund": [
"product_id",
"value",
"currency",
"quantity"
],
"app_store_subscription_cancel": [
"product_id",
"price",
"value",
"currency",
"cancellation_reason",
],
"app_store_subscription_convert": [
"product_id",
"price",
"value",
"currency",
"quantity",
],
"app_store_subscription_renew": [
"product_id",
"price",
"value",
"currency",
"quantity",
"renewal_count",
],
"app_update": [
"previous_app_version"
],
"begin_checkout": [
"coupon",
"currency",
"items",
"value"
],
"click": [],
"dynamic_link_app_open": [
"source",
"medium",
"campaign",
"link_id",
"accept_time"
],
"dynamic_link_app_update": [
"source",
"medium",
"campaign",
"link_id",
"accept_time",
],
"dynamic_link_first_open": [
"source",
"medium",
"campaign",
"link_id",
"accept_time",
],
"earn_virtual_currency": [
"virtual_currency_name",
"value"
],
"error": [
"firebase_error",
"firebase_error_value"
],
"file_download": [
"file_extension",
"file_name",
"link_classes",
"link_domain",
"link_id",
"link_text",
"link_url",
],
"firebase_campaign": [
"source",
"medium",
"campaign",
"term",
"content",
"gclid",
"aclid",
"cp1",
"anid",
"click_timestamp",
"campaign_info_source",
],
"firebase_in_app_message_action": [
"message_name",
"message_device_time",
"message_id",
],
"firebase_in_app_message_dismiss": [
"message_name",
"message_device_time",
"message_id",
],
"firebase_in_app_message_impression": [
"message_name",
"message_device_time",
"message_id",
],
"first_open": [
"previous_gmp_app_id",
"updated_with_analytics",
"previous_first_open_count",
"system_app",
"system_app_update",
"deferred_analytics_collection",
"reset_analytics_cause",
"engagement_time_msec",
],
"first_visit": [],
"generate_lead": [
"value",
"currency"
],
"in_app_purchase": [
"product_id",
"price",
"value",
"currency",
"quantity",
"subscription",
"free_trial",
"introductory_price",
],
"join_group": [
"group_id"
],
"level_end": [
"level_name",
"success"
],
"level_start": [
"level_name"
],
"level_up": [
"character",
"level"
],
"login": [
"method"
],
"notification_dismiss": [
"message_name",
"message_time",
"message_device_time",
"message_id",
"topic",
"label",
"message_channel",
],
"notification_foreground": [
"message_name",
"message_time",
"message_device_time",
"message_id",
"topic",
"label",
"message_channel",
"message_type",
],
"notification_open": [
"message_name",
"message_time",
"message_device_time",
"message_id",
"topic",
"label",
"message_channel",
],
"notification_receive": [
"message_name",
"message_time",
"message_device_time",
"message_id",
"topic",
"label",
"message_channel",
"message_type",
],
"notification_send": [
"message_name",
"message_time",
"message_device_time",
"message_id",
"topic",
"label",
"message_channel",
],
"os_update": [
"previous_os_version"
],
"page_view": [
"page_location",
"page_referrer"
],
"post_score": [
"level",
"character",
"score"
],
"purchase": [
"affiliation",
"coupon",
"currency",
"items",
"transaction_id",
"shipping",
"tax",
"value",
],
"refund": [
"transaction_id",
"value",
"currency",
"tax",
"shipping",
"items"
],
"remove_from_cart": [
"currency",
"items",
"value"
],
"screen_view": [
"firebase_screen",
"firebase_screen_class",
"firebase_screen_id",
"firebase_previous_screen",
"firebase_previous_class",
"firebase_previous_id",
"engagement_time_msec",
],
"scroll": [],
"search": [
"search_term"
],
"select_content": [
"content_type",
"item_id"
],
"select_item": [
"items",
"item_list_name",
"item_list_id"
],
"select_promotion": [
"items",
"promotion_id",
"promotion_name",
"creative_name",
"creative_slot",
"location_id",
],
"session_start": [],
"share": [
"content_type",
"item_id"
],
"sign_up": [
"method"
],
"view_search_results": [
"search_term"
],
"spend_virtual_currency": [
"item_name",
"virtual_currency_name",
"value"
],
"tutorial_begin": [],
"tutorial_complete": [],
"unlock_achievement": [
"achievement_id"
],
"user_engagement": [
"engagement_time_msec"
],
"video_start": [
"video_current_time",
"video_duration",
"video_percent",
"video_provider",
"video_title",
"video_url",
"visible",
],
"video_progress": [
"video_current_time",
"video_duration",
"video_percent",
"video_provider",
"video_title",
"video_url",
"visible",
],
"video_complete": [
"video_current_time",
"video_duration",
"video_percent",
"video_provider",
"video_title",
"video_url",
"visible",
],
"view_cart": [
"currency",
"items",
"value"
],
"view_item": [
"currency",
"items",
"value"
],
"view_item_list": [
"items",
"item_list_name",
"item_list_id"
],
"view_promotion": [
"items",
"promotion_id",
"promotion_name",
"creative_name",
"creative_slot",
"location_id",
],
}
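The table above enumerates, for each GA4 event name, the parameter names that event accepts. A minimal sketch of how such a lookup table could be used to strip unsupported parameters before dispatching an event; the helper name and the pass-through behaviour for unknown events are assumptions, not part of this commit:

```python
def filter_event_params(event_name, params, table=params_dict):
    """Drop parameters that are not listed for the given GA4 event name.

    Unknown event names are passed through unchanged (assumption).
    """
    allowed = table.get(event_name)
    if allowed is None:
        return dict(params)
    return {key: value for key, value in params.items() if key in allowed}

# 'coupon' is not listed for add_to_cart above, so it is dropped here.
filter_event_params('add_to_cart', {'currency': 'USD', 'value': 9.99, 'coupon': 'SAVE10'})
```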

View file

@@ -346,7 +346,7 @@ class FileHash:
return f'<FileHash mode: {self.mode} value: {self.value}>'
class Distribution:
class Distribution(metaclass=abc.ABCMeta):
"""A Python distribution package."""
@abc.abstractmethod
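The hunk above turns the vendored importlib_metadata Distribution class into an abstract base class. A small illustrative sketch; the method shown is a simplified stand-in, not the full vendored interface. With abc.ABCMeta and at least one @abc.abstractmethod, Distribution() itself can no longer be instantiated, only concrete subclasses can.

```python
import abc

class Distribution(metaclass=abc.ABCMeta):   # simplified stand-in for the vendored class
    """A Python distribution package."""

    @abc.abstractmethod
    def read_text(self, filename):
        """Return the text of the named metadata file, or None."""

class DummyDistribution(Distribution):       # concrete subclasses must implement the abstract methods
    def read_text(self, filename):
        return ""

# Distribution() now raises TypeError; DummyDistribution() works.
```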

View file

@@ -1,5 +1,5 @@
apscheduler==3.9.1.post1
importlib-metadata==5.2.0
importlib-metadata==6.0.0
importlib-resources==5.12.0
pyinstaller==5.7.0
pyopenssl==22.1.0

View file

@@ -36,7 +36,7 @@ except ImportError:
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
from UniversalAnalytics import Tracker
from ga4mp import GtagMP
import pytz
PYTHON2 = sys.version_info[0] == 2
@@ -578,12 +578,12 @@ def start():
# Send system analytics events
if not CONFIG.FIRST_RUN_COMPLETE:
analytics_event(category='system', action='install')
analytics_event(name='install')
elif _UPDATE:
analytics_event(category='system', action='update')
analytics_event(name='update')
analytics_event(category='system', action='start')
analytics_event(name='start')
_STARTED = True
@@ -715,7 +715,8 @@ def dbcheck():
'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, '
'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, '
'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, '
'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)'
'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, '
'marker_credits_first INTEGER DEFAULT NULL, marker_credits_final INTEGER DEFAULT NULL)'
)
# users table :: This table keeps record of the friends list
@@ -1564,6 +1565,18 @@ def dbcheck():
'ALTER TABLE session_history_metadata ADD COLUMN channel_thumb TEXT'
)
# Upgrade session_history_metadata table from earlier versions
try:
c_db.execute('SELECT marker_credits_first FROM session_history_metadata')
except sqlite3.OperationalError:
logger.debug("Altering database. Updating database table session_history_metadata.")
c_db.execute(
'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_first INTEGER DEFAULT NULL'
)
c_db.execute(
'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_final INTEGER DEFAULT NULL'
)
# Upgrade session_history_media_info table from earlier versions
try:
c_db.execute('SELECT transcode_decision FROM session_history_media_info')
@@ -2830,44 +2843,45 @@ def generate_uuid():
def initialize_tracker():
data = {
'dataSource': 'server',
'appName': common.PRODUCT,
'appVersion': common.RELEASE,
'appId': INSTALL_TYPE,
'appInstallerId': CONFIG.GIT_BRANCH,
'dimension1': '{} {}'.format(common.PLATFORM, common.PLATFORM_RELEASE), # App Platform
'dimension2': common.PLATFORM_LINUX_DISTRO, # Linux Distro
'dimension3': common.PYTHON_VERSION,
'userLanguage': SYS_LANGUAGE,
'documentEncoding': SYS_ENCODING,
'noninteractive': True
}
tracker = Tracker.create('UA-111522699-2', client_id=CONFIG.PMS_UUID, hash_client_id=True,
user_agent=common.USER_AGENT)
tracker.set(data)
tracker = GtagMP(
api_secret='Cl_LjAKUT26AS22YZwqaPw',
measurement_id='G-NH1M4BYM2P',
client_id=CONFIG.PMS_UUID
)
return tracker
def analytics_event(category, action, label=None, value=None, **kwargs):
data = {'category': category, 'action': action}
def analytics_event(name, **kwargs):
event = TRACKER.create_new_event(name=name)
event.set_event_param('name', common.PRODUCT)
event.set_event_param('version', common.RELEASE)
event.set_event_param('install', INSTALL_TYPE)
event.set_event_param('branch', CONFIG.GIT_BRANCH)
event.set_event_param('platform', common.PLATFORM)
event.set_event_param('platformRelease', common.PLATFORM_RELEASE)
event.set_event_param('platformVersion', common.PLATFORM_VERSION)
event.set_event_param('linuxDistro', common.PLATFORM_LINUX_DISTRO)
event.set_event_param('pythonVersion', common.PYTHON_VERSION)
event.set_event_param('language', SYS_LANGUAGE)
event.set_event_param('encoding', SYS_ENCODING)
event.set_event_param('timezone', str(SYS_TIMEZONE))
event.set_event_param('timezoneUTCOffset', f'UTC{SYS_UTC_OFFSET}')
if label is not None:
data['label'] = label
for key, value in kwargs.items():
event.set_event_param(key, value)
if value is not None:
data['value'] = value
plex_tv = plextv.PlexTV()
ip_address = plex_tv.get_public_ip(output_format='text')
geolocation = plex_tv.get_geoip_lookup(ip_address) or {}
if kwargs:
data.update(kwargs)
event.set_event_param('country', geolocation.get('country', 'Unknown'))
event.set_event_param('countryCode', geolocation.get('code', 'Unknown'))
if TRACKER:
try:
TRACKER.send('event', data)
TRACKER.send(events=[event])
except Exception as e:
logger.warn("Failed to send analytics event for category '%s', action '%s': %s" % (category, action, e))
logger.warn("Failed to send analytics event for name '%s': %s" % (name, e))
def check_folder_writable(folder, fallback, name):
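Taken together, the hunks above swap the Universal Analytics tracker for the GA4 Measurement Protocol via ga4mp, with geolocation looked up through plex.tv. A minimal standalone sketch of the same event flow, using only the ga4mp calls that appear in this commit; the credentials and parameter values below are placeholders:

```python
from ga4mp import GtagMP

# Placeholder credentials; Tautulli supplies its own measurement ID, API secret, and PMS UUID.
tracker = GtagMP(api_secret='<api_secret>',
                 measurement_id='G-XXXXXXXXXX',
                 client_id='<client_id>')

event = tracker.create_new_event(name='start')
event.set_event_param('version', 'v2.x.x')    # arbitrary key/value pairs become GA4 event parameters
event.set_event_param('platform', 'Linux')

tracker.send(events=[event])                  # events are posted as a batch
```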

View file

@@ -110,11 +110,13 @@ class ActivityHandler(object):
self.set_session_state()
self.get_db_session()
def set_session_state(self):
self.ap.set_session_state(session_key=self.session_key,
state=self.state,
view_offset=self.view_offset,
stopped=helpers.timestamp())
def set_session_state(self, view_offset=None):
self.ap.set_session_state(
session_key=self.session_key,
state=self.state,
view_offset=view_offset or self.view_offset,
stopped=helpers.timestamp()
)
def put_notification(self, notify_action, **kwargs):
notification = {'stream_data': self.db_session.copy(), 'notify_action': notify_action}
@@ -246,26 +248,34 @@ class ActivityHandler(object):
self.put_notification('on_change')
def on_intro(self, marker):
if self.get_live_session():
logger.debug("Tautulli ActivityHandler :: Session %s reached intro marker." % str(self.session_key))
logger.debug("Tautulli ActivityHandler :: Session %s reached intro marker." % str(self.session_key))
self.put_notification('on_intro', marker=marker)
self.set_session_state(view_offset=marker['start_time_offset'])
self.put_notification('on_intro', marker=marker)
def on_commercial(self, marker):
if self.get_live_session():
logger.debug("Tautulli ActivityHandler :: Session %s reached commercial marker." % str(self.session_key))
logger.debug("Tautulli ActivityHandler :: Session %s reached commercial marker." % str(self.session_key))
self.put_notification('on_commercial', marker=marker)
self.set_session_state(view_offset=marker['start_time_offset'])
self.put_notification('on_commercial', marker=marker)
def on_credits(self, marker):
if self.get_live_session():
logger.debug("Tautulli ActivityHandler :: Session %s reached credits marker." % str(self.session_key))
logger.debug("Tautulli ActivityHandler :: Session %s reached credits marker." % str(self.session_key))
self.put_notification('on_credits', marker=marker)
self.set_session_state(view_offset=marker['start_time_offset'])
def on_watched(self):
self.put_notification('on_credits', marker=marker)
def on_watched(self, marker=None):
logger.debug("Tautulli ActivityHandler :: Session %s watched." % str(self.session_key))
if marker:
self.set_session_state(view_offset=marker['start_time_offset'])
else:
self.update_db_session()
watched_notifiers = notification_handler.get_notify_state_enabled(
session=self.db_session, notify_action='on_watched', notified=False)
@@ -368,38 +378,58 @@ class ActivityHandler(object):
if self.db_session['marker'] != marker_idx:
self.ap.set_marker(session_key=self.session_key, marker_idx=marker_idx, marker_type=marker['type'])
callback_func = getattr(self, 'on_{}'.format(marker['type']))
if self.view_offset < marker['start_time_offset']:
# Schedule a callback for the exact offset of the marker
schedule_callback(
'session_key-{}-marker-{}'.format(self.session_key, marker_idx),
func=callback_func,
func=self._marker_callback,
args=[marker],
milliseconds=marker['start_time_offset'] - self.view_offset
)
else:
callback_func(marker)
self._marker_callback(marker)
break
if not marker_flag:
self.ap.set_marker(session_key=self.session_key, marker_idx=0)
def check_watched(self):
# Monitor if the stream has reached the watch percentage for notifications
if not self.db_session['watched'] and self.timeline['state'] != 'buffering':
progress_percent = helpers.get_percent(self.timeline['viewOffset'], self.db_session['duration'])
watched_percent = {
'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
}
def _marker_callback(self, marker):
if self.get_live_session():
# Reset ActivityProcessor object for new database thread
self.ap = activity_processor.ActivityProcessor()
if progress_percent >= watched_percent.get(self.db_session['media_type'], 101):
self.ap.set_watched(session_key=self.session_key)
self.on_watched()
if marker['type'] == 'intro':
self.on_intro(marker)
elif marker['type'] == 'commercial':
self.on_commercial(marker)
elif marker['type'] == 'credits':
self.on_credits(marker)
if not self.db_session['watched']:
if marker['final'] and plexpy.CONFIG.WATCHED_MARKER == 1:
self._marker_watched(marker)
elif marker['first'] and (plexpy.CONFIG.WATCHED_MARKER in (2, 3)):
self._marker_watched(marker)
def _marker_watched(self, marker):
if not self.db_session['watched']:
self._watched_callback(marker)
def check_watched(self):
if plexpy.CONFIG.WATCHED_MARKER == 1 or plexpy.CONFIG.WATCHED_MARKER == 2:
return
# Monitor if the stream has reached the watch percentage for notifications
if not self.db_session['watched'] and self.state != 'buffering' and helpers.check_watched(
self.db_session['media_type'], self.view_offset, self.db_session['duration']
):
self._watched_callback()
def _watched_callback(self, marker=None):
self.ap.set_watched(session_key=self.session_key)
self.on_watched(marker)
class TimelineHandler(object):
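The new _marker_callback/_marker_watched path above decides whether reaching a credits marker should flag the session as watched, gated by the new WATCHED_MARKER setting. A condensed sketch of that decision, reading the marker flags the same way the handler does:

```python
def marker_triggers_watched(marker, watched_marker):
    """Condensed from ActivityHandler._marker_callback above.

    watched_marker: 1 -> watched at the final credits marker,
                    2 or 3 -> watched at the first credits marker.
    With any other value only the percent-based check_watched() path applies.
    """
    if watched_marker == 1:
        return bool(marker.get('final'))
    if watched_marker in (2, 3):
        return bool(marker.get('first'))
    return False
```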

View file

@@ -327,7 +327,7 @@ class ActivityProcessor(object):
# Get the last insert row id
last_id = self.db.last_insert_id()
new_session = prev_session = None
prev_progress_percent = media_watched_percent = 0
watched = False
if session['live']:
# Check if we should group the session, select the last guid from the user
@@ -369,12 +369,11 @@ class ActivityProcessor(object):
'view_offset': result[1]['view_offset'],
'reference_id': result[1]['reference_id']}
watched_percent = {'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT
}
prev_progress_percent = helpers.get_percent(prev_session['view_offset'], session['duration'])
media_watched_percent = watched_percent.get(session['media_type'], 0)
marker_first, marker_final = helpers.get_first_final_marker(metadata['markers'])
watched = helpers.check_watched(
session['media_type'], session['view_offset'], session['duration'],
marker_first, marker_final
)
query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
@@ -384,8 +383,7 @@ class ActivityProcessor(object):
# else set the reference_id to the new id
if prev_session is None and new_session is None:
args = [last_id, last_id]
elif prev_progress_percent < media_watched_percent and \
prev_session['view_offset'] <= new_session['view_offset'] or \
elif watched and prev_session['view_offset'] <= new_session['view_offset'] or \
session['live'] and prev_session['guid'] == new_session['guid']:
args = [prev_session['reference_id'], new_session['id']]
else:
@@ -490,6 +488,14 @@ class ActivityProcessor(object):
genres = ";".join(metadata['genres'])
labels = ";".join(metadata['labels'])
marker_credits_first = None
marker_credits_final = None
for marker in metadata['markers']:
if marker['first']:
marker_credits_first = marker['start_time_offset']
if marker['final']:
marker_credits_final = marker['start_time_offset']
# logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..."
# % session['session_key'])
keys = {'id': last_id}
@@ -528,7 +534,9 @@ class ActivityProcessor(object):
'live': session['live'],
'channel_call_sign': media_info.get('channel_call_sign', ''),
'channel_identifier': media_info.get('channel_identifier', ''),
'channel_thumb': media_info.get('channel_thumb', '')
'channel_thumb': media_info.get('channel_thumb', ''),
'marker_credits_first': marker_credits_first,
'marker_credits_final': marker_credits_final
}
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
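The loop above pulls the first and final credits marker offsets out of the item's markers so they can be written to session_history_metadata. A short sketch with hypothetical data; the marker dict shape (first/final/start_time_offset) is inferred from how this commit reads it:

```python
# Hypothetical credits markers for one episode (offsets in milliseconds).
markers = [
    {'type': 'credits', 'first': True,  'final': False, 'start_time_offset': 2160000},
    {'type': 'credits', 'first': False, 'final': True,  'start_time_offset': 2340000},
]

marker_credits_first = marker_credits_final = None
for marker in markers:
    if marker['first']:
        marker_credits_first = marker['start_time_offset']
    if marker['final']:
        marker_credits_final = marker['start_time_offset']
# Both offsets are then stored alongside the rest of the session metadata.
```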

View file

@@ -199,6 +199,7 @@ _CONFIG_DEFINITIONS = {
'UPGRADE_FLAG': (int, 'Advanced', 0),
'VERBOSE_LOGS': (int, 'Advanced', 1),
'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1),
'WATCHED_MARKER': (int, 'Monitoring', 3),
'WEBSOCKET_MONITOR_PING_PONG': (int, 'Advanced', 0),
'WEBSOCKET_CONNECTION_ATTEMPTS': (int, 'Advanced', 5),
'WEBSOCKET_CONNECTION_TIMEOUT': (int, 'Advanced', 5),
@@ -298,7 +299,8 @@ SETTINGS = [
'REFRESH_USERS_INTERVAL',
'SHOW_ADVANCED_SETTINGS',
'TIME_FORMAT',
'TV_WATCHED_PERCENT'
'TV_WATCHED_PERCENT',
'WATCHED_MARKER'
]
CHECKED_SETTINGS = [
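The new WATCHED_MARKER setting defaults to 3. Based on how the other hunks in this commit consume it (see helpers.check_watched below), its values can be summarised as the following sketch; the mapping is descriptive only and does not exist in the commit itself:

```python
WATCHED_MARKER_MODES = {
    0: 'watched percent threshold only',
    1: 'final credits marker (falls back to the percent threshold if no marker)',
    2: 'first credits marker (falls back to the percent threshold if no marker)',
    3: 'earlier of the first credits marker and the percent threshold',
}
```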

View file

@@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
# This file is part of Tautulli.
#
@@ -99,8 +99,9 @@ class DataFactory(object):
'MIN(started) AS started',
'MAX(stopped) AS stopped',
'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - \
SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS duration',
SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration',
'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter',
'session_history.view_offset',
'session_history.user_id',
'session_history.user',
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
@@ -139,6 +140,9 @@ class DataFactory(object):
'MAX((CASE WHEN (view_offset IS NULL OR view_offset = "") THEN 0.1 ELSE view_offset * 1.0 END) / \
(CASE WHEN (session_history_metadata.duration IS NULL OR session_history_metadata.duration = "") \
THEN 1.0 ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete',
'session_history_metadata.duration',
'session_history_metadata.marker_credits_first',
'session_history_metadata.marker_credits_final',
'session_history_media_info.transcode_decision',
'COUNT(*) AS group_count',
'GROUP_CONCAT(session_history.id) AS group_ids',
@@ -159,8 +163,9 @@ class DataFactory(object):
'started',
'stopped',
'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE (strftime("%s", "now") - started) END) - \
SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS duration',
SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration',
'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter',
'view_offset',
'user_id',
'user',
'(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = "" \
@@ -198,6 +203,9 @@ class DataFactory(object):
'MAX((CASE WHEN (view_offset IS NULL OR view_offset = "") THEN 0.1 ELSE view_offset * 1.0 END) / \
(CASE WHEN (duration IS NULL OR duration = "") \
THEN 1.0 ELSE duration * 1.0 END) * 100) AS percent_complete',
'duration',
'NULL AS marker_credits_first',
'NULL AS marker_credits_final',
'transcode_decision',
'NULL AS group_count',
'NULL AS group_ids',
@@ -262,7 +270,7 @@ class DataFactory(object):
item['user_thumb'] = users_lookup.get(item['user_id'])
filter_duration += int(item['duration'])
filter_duration += int(item['play_duration'])
if item['media_type'] == 'episode' and item['parent_thumb']:
thumb = item['parent_thumb']
@@ -274,7 +282,10 @@ class DataFactory(object):
if item['live']:
item['percent_complete'] = 100
if item['percent_complete'] >= watched_percent[item['media_type']]:
if helpers.check_watched(
item['media_type'], item['view_offset'], item['duration'],
item['marker_credits_first'], item['marker_credits_final']
):
watched_status = 1
elif item['percent_complete'] >= watched_percent[item['media_type']] / 2.0:
watched_status = 0.5
@@ -297,7 +308,7 @@ class DataFactory(object):
'date': item['date'],
'started': item['started'],
'stopped': item['stopped'],
'duration': item['duration'],
'duration': item['play_duration'],
'paused_counter': item['paused_counter'],
'user_id': item['user_id'],
'user': item['user'],
@@ -371,10 +382,6 @@ class DataFactory(object):
if user_id:
where_id += 'AND session_history.user_id = %s ' % user_id
movie_watched_percent = plexpy.CONFIG.MOVIE_WATCHED_PERCENT
tv_watched_percent = plexpy.CONFIG.TV_WATCHED_PERCENT
music_watched_percent = plexpy.CONFIG.MUSIC_WATCHED_PERCENT
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
sort_type = 'total_duration' if stats_type == 'duration' else 'total_plays'
@@ -908,6 +915,43 @@ class DataFactory(object):
'rows': session.mask_session_info(top_platform, mask_metadata=False)})
elif stat == 'last_watched':
movie_watched_percent = plexpy.CONFIG.MOVIE_WATCHED_PERCENT
tv_watched_percent = plexpy.CONFIG.TV_WATCHED_PERCENT
if plexpy.CONFIG.WATCHED_MARKER == 1:
watched_threshold = (
'(CASE WHEN shm.marker_credits_final IS NULL '
'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 '
'ELSE shm.marker_credits_final END) '
'AS watched_threshold'
) % (movie_watched_percent, tv_watched_percent)
watched_where = '_view_offset >= watched_threshold'
elif plexpy.CONFIG.WATCHED_MARKER == 2:
watched_threshold = (
'(CASE WHEN shm.marker_credits_first IS NULL '
'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 '
'ELSE shm.marker_credits_first END) '
'AS watched_threshold'
) % (movie_watched_percent, tv_watched_percent)
watched_where = '_view_offset >= watched_threshold'
elif plexpy.CONFIG.WATCHED_MARKER == 3:
watched_threshold = (
'MIN('
'(CASE WHEN shm.marker_credits_first IS NULL '
'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 '
'ELSE shm.marker_credits_first END), '
'sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0) '
'AS watched_threshold'
) % (movie_watched_percent, tv_watched_percent, movie_watched_percent, tv_watched_percent)
watched_where = '_view_offset >= watched_threshold'
else:
watched_threshold = 'NULL AS watched_threshold'
watched_where = (
'sh.media_type == "movie" AND percent_complete >= %d '
'OR sh.media_type == "episode" AND percent_complete >= %d'
) % (movie_watched_percent, tv_watched_percent)
last_watched = []
try:
query = 'SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \
@@ -918,22 +962,25 @@ class DataFactory(object):
'(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ""' \
' THEN u.username ELSE u.friendly_name END) ' \
' AS friendly_name, ' \
'MAX(sh.started) AS last_watch, ' \
'((CASE WHEN sh.view_offset IS NULL THEN 0.1 ELSE sh.view_offset * 1.0 END) / ' \
' (CASE WHEN shm.duration IS NULL THEN 1.0 ELSE shm.duration * 1.0 END) * 100) ' \
' AS percent_complete ' \
'FROM (SELECT *, MAX(id) FROM session_history ' \
'MAX(sh.started) AS last_watch, sh._view_offset, sh._duration, ' \
'(sh._view_offset / sh._duration * 100) AS percent_complete, ' \
'%s ' \
'FROM (SELECT *, MAX(session_history.id), ' \
' (CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) AS _view_offset, ' \
' (CASE WHEN duration IS NULL THEN 1.0 ELSE duration * 1.0 END) AS _duration ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' WHERE session_history.stopped >= %s ' \
' AND (session_history.media_type = "movie" ' \
' OR session_history.media_type = "episode") %s ' \
' GROUP BY %s) AS sh ' \
'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \
'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \
'WHERE sh.media_type == "movie" AND percent_complete >= %s ' \
' OR sh.media_type == "episode" AND percent_complete >= %s ' \
'WHERE %s ' \
'GROUP BY sh.id ' \
'ORDER BY last_watch DESC ' \
'LIMIT %s OFFSET %s' % (timestamp, where_id, group_by, movie_watched_percent, tv_watched_percent,
'LIMIT %s OFFSET %s' % (watched_threshold,
timestamp, where_id, group_by, watched_where,
stats_count, stats_start)
result = monitor_db.select(query)
except Exception as e:
@@ -1141,7 +1188,7 @@ class DataFactory(object):
return library_stats
def get_watch_time_stats(self, rating_key=None, grouping=None, query_days=None):
def get_watch_time_stats(self, rating_key=None, media_type=None, grouping=None, query_days=None):
if rating_key is None:
return []
@@ -1163,6 +1210,15 @@ class DataFactory(object):
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
if media_type in ('collection', 'playlist'):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_item_children(rating_key=rating_key, media_type=media_type)
rating_keys = [child['rating_key'] for child in result['children_list']]
else:
rating_keys = [rating_key]
rating_keys_arg = ','.join(['?'] * len(rating_keys))
for days in query_days:
timestamp_query = timestamp - days * 24 * 60 * 60
@@ -1174,11 +1230,14 @@ class DataFactory(object):
'COUNT(DISTINCT %s) AS total_plays, section_id ' \
'FROM session_history ' \
'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
'WHERE stopped >= %s ' \
'AND (session_history.grandparent_rating_key = ? ' \
'OR session_history.parent_rating_key = ? ' \
'OR session_history.rating_key = ?)' % (group_by, timestamp_query)
result = monitor_db.select(query, args=[rating_key, rating_key, rating_key])
'WHERE stopped >= ? ' \
'AND (session_history.grandparent_rating_key IN (%s) ' \
'OR session_history.parent_rating_key IN (%s) ' \
'OR session_history.rating_key IN (%s))' % (
group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg
)
result = monitor_db.select(query, args=[timestamp_query] + rating_keys * 3)
else:
result = []
else:
@@ -1188,10 +1247,13 @@ class DataFactory(object):
'COUNT(DISTINCT %s) AS total_plays, section_id ' \
'FROM session_history ' \
'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
'WHERE (session_history.grandparent_rating_key = ? ' \
'OR session_history.parent_rating_key = ? ' \
'OR session_history.rating_key = ?)' % group_by
result = monitor_db.select(query, args=[rating_key, rating_key, rating_key])
'WHERE (session_history.grandparent_rating_key IN (%s) ' \
'OR session_history.parent_rating_key IN (%s) ' \
'OR session_history.rating_key IN (%s))' % (
group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg
)
result = monitor_db.select(query, args=rating_keys * 3)
else:
result = []
except Exception as e:
@@ -1220,7 +1282,7 @@ class DataFactory(object):
return item_watch_time_stats
def get_user_stats(self, rating_key=None, grouping=None):
def get_user_stats(self, rating_key=None, media_type=None, grouping=None):
if grouping is None:
grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
@@ -1232,6 +1294,15 @@ class DataFactory(object):
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
if media_type in ('collection', 'playlist'):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_item_children(rating_key=rating_key, media_type=media_type)
rating_keys = [child['rating_key'] for child in result['children_list']]
else:
rating_keys = [rating_key]
rating_keys_arg = ','.join(['?'] * len(rating_keys))
try:
if str(rating_key).isdigit():
query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
@@ -1243,12 +1314,15 @@ class DataFactory(object):
'FROM session_history ' \
'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
'JOIN users ON users.user_id = session_history.user_id ' \
'WHERE (session_history.grandparent_rating_key = ? ' \
'OR session_history.parent_rating_key = ? ' \
'OR session_history.rating_key = ?) ' \
'WHERE (session_history.grandparent_rating_key IN (%s) ' \
'OR session_history.parent_rating_key IN (%s) ' \
'OR session_history.rating_key IN (%s)) ' \
'GROUP BY users.user_id ' \
'ORDER BY total_plays DESC, total_time DESC' % group_by
result = monitor_db.select(query, args=[rating_key, rating_key, rating_key])
'ORDER BY total_plays DESC, total_time DESC' % (
group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg
)
result = monitor_db.select(query, args=rating_keys * 3)
else:
result = []
except Exception as e:
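For collections and playlists, the stats queries above now expand the rating key into its children and build a parameterised IN (...) clause instead of binding a single key three times. A simplified sketch of that technique with hypothetical keys:

```python
rating_keys = ['1001', '1002', '1003']                 # hypothetical child rating keys
rating_keys_arg = ','.join(['?'] * len(rating_keys))   # -> "?,?,?"

query = (
    'SELECT COUNT(*) FROM session_history '
    'WHERE grandparent_rating_key IN (%s) '
    'OR parent_rating_key IN (%s) '
    'OR rating_key IN (%s)' % (rating_keys_arg, rating_keys_arg, rating_keys_arg)
)
args = rating_keys * 3   # one copy of the keys for each IN (...) group
```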

View file

@@ -1733,3 +1733,43 @@ def short_season(title):
if title.startswith('Season ') and title[7:].isdigit():
return 'S%s' % title[7:]
return title
def get_first_final_marker(markers):
first = None
final = None
for marker in markers:
if marker['first']:
first = marker
if marker['final']:
final = marker
return first, final
def check_watched(media_type, view_offset, duration, marker_credits_first=None, marker_credits_final=None):
if isinstance(marker_credits_first, dict):
marker_credits_first = marker_credits_first['start_time_offset']
if isinstance(marker_credits_final, dict):
marker_credits_final = marker_credits_final['start_time_offset']
view_offset = cast_to_int(view_offset)
duration = cast_to_int(duration)
watched_percent = {
'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
}
threshold = watched_percent.get(media_type, 0) / 100 * duration
if not threshold:
return False
if plexpy.CONFIG.WATCHED_MARKER == 1 and marker_credits_final:
return view_offset >= marker_credits_final
elif plexpy.CONFIG.WATCHED_MARKER == 2 and marker_credits_first:
return view_offset >= marker_credits_first
elif plexpy.CONFIG.WATCHED_MARKER == 3 and marker_credits_first:
return view_offset >= min(threshold, marker_credits_first)
else:
return view_offset >= threshold
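A usage sketch of the new helper; the values are hypothetical and in milliseconds, matching how the activity handlers pass view_offset and duration, and the call assumes a loaded Tautulli config:

```python
# Hypothetical 40-minute episode with credits markers at 36 and 39 minutes.
check_watched(
    'episode',
    view_offset=2250000,
    duration=2400000,
    marker_credits_first=2160000,
    marker_credits_final=2340000,
)
# With WATCHED_MARKER == 3 this is True once view_offset passes the earlier of the
# percent threshold and the first credits marker; with 1 or 2 the respective
# credits marker is used instead.
```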

View file

@@ -971,7 +971,8 @@ class RecentlyAdded(Newsletter):
'description': 'Select the libraries to include in the newsletter.',
'name': 'newsletter_config_incl_libraries',
'input_type': 'selectize',
'select_options': self._get_sections_options()
'select_options': self._get_sections_options(),
'select_all': True
}
]

View file

@@ -1435,21 +1435,24 @@ class EMAIL(Notifier):
'name': 'email_to',
'description': 'The email address(es) of the recipients.',
'input_type': 'selectize',
'select_options': user_emails_to
'select_options': user_emails_to,
'select_all': True
},
{'label': 'CC',
'value': self.config['cc'],
'name': 'email_cc',
'description': 'The email address(es) to CC.',
'input_type': 'selectize',
'select_options': user_emails_cc
'select_options': user_emails_cc,
'select_all': True
},
{'label': 'BCC',
'value': self.config['bcc'],
'name': 'email_bcc',
'description': 'The email address(es) to BCC.',
'input_type': 'selectize',
'select_options': user_emails_bcc
'select_options': user_emails_bcc,
'select_all': True
},
{'label': 'SMTP Server',
'value': self.config['smtp_server'],
@@ -3216,31 +3219,34 @@ class PUSHOVER(Notifier):
return self.make_request('https://api.pushover.net/1/messages.json', headers=headers, data=data, files=files)
def get_sounds(self):
sounds = {
'': '',
'alien': 'Alien Alarm (long)',
'bike': 'Bike',
'bugle': 'Bugle',
'cashregister': 'Cash Register',
'classical': 'Classical',
'climb': 'Climb (long)',
'cosmic': 'Cosmic',
'echo': 'Pushover Echo (long)',
'falling': 'Falling',
'gamelan': 'Gamelan',
'incoming': 'Incoming',
'intermission': 'Intermission',
'magic': 'Magic',
'mechanical': 'Mechanical',
'none': 'None (silent)',
'persistent': 'Persistent (long)',
'pianobar': 'Piano Bar',
'pushover': 'Pushover (default)',
'siren': 'Siren',
'spacealarm': 'Space Alarm',
'tugboat': 'Tug Boat',
'updown': 'Up Down (long)'
}
sounds = [
{'value': '', 'text': ''},
{'value': 'alien', 'text': 'Alien Alarm (long)'},
{'value': 'bike', 'text': 'Bike'},
{'value': 'bugle', 'text': 'Bugle'},
{'value': 'cashregister', 'text': 'Cash Register'},
{'value': 'classical', 'text': 'Classical'},
{'value': 'climb', 'text': 'Climb (long)'},
{'value': 'cosmic', 'text': 'Cosmic'},
{'value': 'echo', 'text': 'Pushover Echo (long)'},
{'value': 'falling', 'text': 'Falling'},
{'value': 'gamelan', 'text': 'Gamelan'},
{'value': 'incoming', 'text': 'Incoming'},
{'value': 'intermission', 'text': 'Intermission'},
{'value': 'magic', 'text': 'Magic'},
{'value': 'mechanical', 'text': 'Mechanical'},
{'value': 'none', 'text': 'None (silent)'},
{'value': 'persistent', 'text': 'Persistent (long)'},
{'value': 'pianobar', 'text': 'Piano Bar'},
{'value': 'pushover', 'text': 'Pushover (default)'},
{'value': 'siren', 'text': 'Siren'},
{'value': 'spacealarm', 'text': 'Space Alarm'},
{'value': 'tugboat', 'text': 'Tug Boat'},
{'value': 'updown', 'text': 'Up Down (long)'},
{'value': 'vibrate', 'text': 'Vibrate Only'},
]
if self.config['sound'] not in [s['value'] for s in sounds]:
sounds.append({'value': self.config['sound'], 'text': self.config['sound']})
return sounds
@@ -3281,9 +3287,10 @@ class PUSHOVER(Notifier):
{'label': 'Sound',
'value': self.config['sound'],
'name': 'pushover_sound',
'description': 'Set the notification sound. Leave blank for the default sound.',
'input_type': 'select',
'select_options': self.get_sounds()
'description': 'Select a notification sound or enter a custom sound name. Leave blank for the default sound.',
'input_type': 'selectize',
'select_options': self.get_sounds(),
'select_all': False
},
{'label': 'Priority',
'value': self.config['priority'],
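get_sounds() above now returns selectize-style option dicts and, per the appended check, keeps any custom sound name the admin has saved so it still shows up in the dropdown. A tiny sketch of that fallback with a hypothetical custom sound:

```python
sounds = [{'value': 'pushover', 'text': 'Pushover (default)'}]
custom_sound = 'mycustomsound'   # hypothetical value stored in pushover_sound

if custom_sound not in [s['value'] for s in sounds]:
    sounds.append({'value': custom_sound, 'text': custom_sound})
```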

View file

@@ -331,6 +331,14 @@ class PlexTV(object):
return request
def get_public_ip(self, output_format=''):
uri = '/:/ip'
request = self.request_handler.make_request(uri=uri,
request_type='GET',
output_format=output_format)
return request
def get_plextv_geoip(self, ip_address='', output_format=''):
uri = '/api/v2/geoip?ip_address=%s' % ip_address
request = self.request_handler.make_request(uri=uri,

View file

@@ -2545,7 +2545,7 @@ class PmsConnect(object):
children_list.append(children_output)
output = {'children_count': helpers.cast_to_int(helpers.get_xml_attr(xml_head[0], 'size')),
'children_type': helpers.get_xml_attr(xml_head[0], 'viewGroup'),
'children_type': helpers.get_xml_attr(xml_head[0], 'viewGroup') or (children_list[0]['media_type'] if children_list else ''),
'title': helpers.get_xml_attr(xml_head[0], 'title2'),
'children_list': children_list
}

View file

@@ -4431,10 +4431,10 @@ class WebInterface(object):
@cherrypy.expose
@requireAuth()
def item_watch_time_stats(self, rating_key=None, **kwargs):
def item_watch_time_stats(self, rating_key=None, media_type=None, **kwargs):
if rating_key:
item_data = datafactory.DataFactory()
result = item_data.get_watch_time_stats(rating_key=rating_key)
result = item_data.get_watch_time_stats(rating_key=rating_key, media_type=media_type)
else:
result = None
@@ -4446,10 +4446,10 @@ class WebInterface(object):
@cherrypy.expose
@requireAuth()
def item_user_stats(self, rating_key=None, **kwargs):
def item_user_stats(self, rating_key=None, media_type=None, **kwargs):
if rating_key:
item_data = datafactory.DataFactory()
result = item_data.get_user_stats(rating_key=rating_key)
result = item_data.get_user_stats(rating_key=rating_key, media_type=media_type)
else:
result = None
@@ -4463,7 +4463,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def get_item_watch_time_stats(self, rating_key=None, grouping=None, query_days=None, **kwargs):
def get_item_watch_time_stats(self, rating_key=None, media_type=None, grouping=None, query_days=None, **kwargs):
""" Get the watch time stats for the media item.
```
@@ -4471,6 +4471,7 @@ class WebInterface(object):
rating_key (str): Rating key of the item
Optional parameters:
media_type (str): Media type of the item (only required for a collection)
grouping (int): 0 or 1
query_days (str): Comma separated days, e.g. "1,7,30,0"
@@ -4504,7 +4505,9 @@ class WebInterface(object):
if rating_key:
item_data = datafactory.DataFactory()
result = item_data.get_watch_time_stats(rating_key=rating_key, grouping=grouping,
result = item_data.get_watch_time_stats(rating_key=rating_key,
media_type=media_type,
grouping=grouping,
query_days=query_days)
if result:
return result
@@ -4518,7 +4521,7 @@ class WebInterface(object):
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@addtoapi()
def get_item_user_stats(self, rating_key=None, grouping=None, **kwargs):
def get_item_user_stats(self, rating_key=None, media_type=None, grouping=None, **kwargs):
""" Get the user stats for the media item.
```
@@ -4526,6 +4529,7 @@ class WebInterface(object):
rating_key (str): Rating key of the item
Optional parameters:
media_type (str): Media type of the item (only required for a collection)
grouping (int): 0 or 1
Returns:
@@ -4554,7 +4558,9 @@ class WebInterface(object):
if rating_key:
item_data = datafactory.DataFactory()
result = item_data.get_user_stats(rating_key=rating_key, grouping=grouping)
result = item_data.get_user_stats(rating_key=rating_key,
media_type=media_type,
grouping=grouping)
if result:
return result
else:
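Both API commands above now take an optional media_type so a collection or playlist rating key is resolved to its children. A hedged usage sketch against Tautulli's v2 API; the host, port, API key, and rating key are placeholders:

```python
import requests

resp = requests.get('http://localhost:8181/api/v2', params={
    'apikey': 'YOUR_API_KEY',
    'cmd': 'get_item_watch_time_stats',
    'rating_key': '12345',        # hypothetical collection rating key
    'media_type': 'collection',   # only required for a collection (or playlist)
})
print(resp.json())
```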

View file

@@ -14,11 +14,12 @@ distro==1.8.0
dnspython==2.2.1
facebook-sdk==3.1.0
future==0.18.2
ga4mp==2.0.4
gntp==1.0.3
html5lib==1.1
httpagentparser==1.9.5
idna==3.4
importlib-metadata==5.2.0
importlib-metadata==6.0.0
importlib-resources==5.12.0
git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois
IPy==1.01

View file

@@ -15,6 +15,10 @@ architectures:
- build-on: arm64
- build-on: armhf
plugs:
shared-memory:
private: true
parts:
tautulli:
plugin: dump