mirror of
https://github.com/Tautulli/Tautulli.git
synced 2025-07-06 05:01:14 -07:00
Concurrent Streams per Day Graph (#2046)
* initial commit * fix grouping in webserve * remove event handler and adapt cursor * optimize most concurrent calculation * update branch from nightly and user filter * max concurrent streams in graph * made several changes mentioned in review
This commit is contained in:
parent
4938954c61
commit
59fe34982e
5 changed files with 265 additions and 5 deletions
|
@ -137,6 +137,20 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<h4><i class="fa fa-video-camera"></i> Daily concurrent stream count by stream type <small>Last <span class="days">30</span> days</small></h4>
|
||||
<p class="help-block">
|
||||
The total count of concurrent streams of tv, movies, and music by the transcode decision.
|
||||
</p>
|
||||
<div class="graphs-instance">
|
||||
<div class="watch-chart" id="graph_concurrent_streams_by_stream_type">
|
||||
<div class="graphs-load"><i class="fa fa-refresh fa-spin"></i> Loading chart...</div>
|
||||
<br>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<h4><i class="fa fa-expand-arrows-alt"></i> <span class="yaxis-text">Play count</span> by source resolution <small>Last <span class="days">30</span> days</small></h4>
|
||||
|
@ -312,7 +326,8 @@
|
|||
'Live TV': '#19A0D7',
|
||||
'Direct Play': '#E5A00D',
|
||||
'Direct Stream': '#FFFFFF',
|
||||
'Transcode': '#F06464'
|
||||
'Transcode': '#F06464',
|
||||
'Max. Concurrent Streams': '#1014FC'
|
||||
};
|
||||
var series_colors = [];
|
||||
$.each(data_series, function(index, series) {
|
||||
|
@ -327,6 +342,7 @@
|
|||
<script src="${http_root}js/graphs/plays_by_platform.js${cache_param}"></script>
|
||||
<script src="${http_root}js/graphs/plays_by_user.js${cache_param}"></script>
|
||||
<script src="${http_root}js/graphs/plays_by_stream_type.js${cache_param}"></script>
|
||||
<script src="${http_root}js/graphs/concurrent_streams_by_stream_type.js${cache_param}"></script>
|
||||
<script src="${http_root}js/graphs/plays_by_source_resolution.js${cache_param}"></script>
|
||||
<script src="${http_root}js/graphs/plays_by_stream_resolution.js${cache_param}"></script>
|
||||
<script src="${http_root}js/graphs/plays_by_platform_by_stream_type.js${cache_param}"></script>
|
||||
|
@ -540,6 +556,33 @@
|
|||
}
|
||||
});
|
||||
|
||||
// Fetch the daily concurrent-stream counts (split by transcode decision)
// and render them into the concurrent-streams Highcharts graph.
$.ajax({
    url: "get_concurrent_streams_by_stream_type",
    type: 'get',
    data: { time_range: time_range, user_id: selected_user_id },
    dataType: "json",
    success: function(data) {
        var dateArray = [];
        $.each(data.categories, function (i, day) {
            dateArray.push(moment(day, 'YYYY-MM-DD').valueOf());
            // Highlight the weekend
            if ((moment(day, 'YYYY-MM-DD').format('ddd') == 'Sat') ||
                (moment(day, 'YYYY-MM-DD').format('ddd') == 'Sun')) {
                // BUG FIX: the weekend bands belong to this chart's own
                // options; the original pushed onto hc_plays_by_day_options,
                // so this chart never showed weekend shading (and the
                // plays-by-day chart accumulated duplicate bands).
                hc_concurrent_streams_by_stream_type_options.xAxis.plotBands.push({
                    from: i-0.5,
                    to: i+0.5,
                    color: 'rgba(80,80,80,0.3)'
                });
            }
        });
        hc_concurrent_streams_by_stream_type_options.yAxis.min = 0;
        hc_concurrent_streams_by_stream_type_options.xAxis.categories = dateArray;
        hc_concurrent_streams_by_stream_type_options.series = getGraphVisibility(hc_concurrent_streams_by_stream_type_options.chart.renderTo, data.series);
        hc_concurrent_streams_by_stream_type_options.colors = getGraphColors(data.series);
        // Renamed from hc_plays_by_stream_type (copy-paste leftover).
        var hc_concurrent_streams_by_stream_type = new Highcharts.Chart(hc_concurrent_streams_by_stream_type_options);
    }
});
|
||||
|
||||
$.ajax({
|
||||
url: "get_plays_by_source_resolution",
|
||||
type: 'get',
|
||||
|
@ -754,6 +797,7 @@
|
|||
|
||||
hc_plays_by_day_options.xAxis.plotBands = [];
|
||||
hc_plays_by_stream_type_options.xAxis.plotBands = [];
|
||||
hc_concurrent_streams_by_stream_type_options.xAxis.plotBands = [];
|
||||
|
||||
hc_plays_by_day_options.yAxis.labels.formatter = yaxis_format;
|
||||
hc_plays_by_dayofweek_options.yAxis.labels.formatter = yaxis_format;
|
||||
|
|
|
@ -0,0 +1,76 @@
|
|||
// Shared tooltip formatter: when the x value parses as a timestamp after
// 2000-01-01 (946684800), show it as a date; otherwise show the raw
// category label. One line per visible series follows.
var formatter_function = function() {
    var s;
    if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
        s = '<b>' + moment(this.x).format('ddd MMM D') + '</b>';
    } else {
        s = '<b>' + this.x + '</b>';
    }
    $.each(this.points, function(i, point) {
        s += '<br/>' + point.series.name + ': ' + point.y;
    });
    return s;
};

// Highcharts options for the "Daily concurrent stream count" graph.
// categories/plotBands/series/colors are filled in by the ajax handler
// in graphs.html before the chart is constructed.
var hc_concurrent_streams_by_stream_type_options = {
    chart: {
        type: 'line',
        backgroundColor: 'rgba(0,0,0,0)',
        renderTo: 'graph_concurrent_streams_by_stream_type'
    },
    title: {
        text: ''
    },
    legend: {
        enabled: true,
        itemStyle: {
            font: '9pt "Open Sans", sans-serif',
            color: '#A0A0A0'
        },
        itemHoverStyle: {
            color: '#FFF'
        },
        itemHiddenStyle: {
            color: '#444'
        }
    },
    credits: {
        enabled: false
    },
    plotOptions: {
        series: {
            events: {
                // Persist per-series show/hide state across page loads.
                legendItemClick: function() {
                    setGraphVisibility(this.chart.renderTo.id, this.chart.series, this.name);
                }
            }
        }
    },
    xAxis: {
        type: 'datetime',
        labels: {
            formatter: function() {
                return moment(this.value).format("MMM D");
            },
            style: {
                color: '#aaa'
            }
        },
        categories: [{}],
        plotBands: []
    },
    yAxis: {
        title: {
            text: null
        },
        labels: {
            style: {
                color: '#aaa'
            }
        }
    },
    tooltip: {
        shared: true,
        crosshairs: true,
        formatter: formatter_function
    },
    series: [{}]
};
|
104
plexpy/graphs.py
104
plexpy/graphs.py
|
@ -22,7 +22,6 @@ from future.builtins import object
|
|||
|
||||
import arrow
|
||||
import datetime
|
||||
|
||||
import plexpy
|
||||
if plexpy.PYTHON2:
|
||||
import common
|
||||
|
@ -826,6 +825,102 @@ class Graphs(object):
|
|||
'series': [series_1_output, series_2_output, series_3_output]}
|
||||
return output
|
||||
|
||||
def get_total_concurrent_streams_per_stream_type(self, time_range='30', user_id=None):
    """Return the daily peak number of concurrent streams for the last
    `time_range` days, split by transcode decision plus an overall total.

    Parameters:
        time_range (str|int): Number of days of history to include (default 30).
        user_id (str): Optional comma-separated list of user ids to filter on.

    Returns:
        dict: {'categories': ['YYYY-MM-DD', ...],
               'series': [{'name': ..., 'data': [...]}, ...]}
        or None if the database query fails.
    """
    monitor_db = database.MonitorDatabase()

    time_range = helpers.cast_to_int(time_range) or 30
    timestamp = helpers.timestamp() - time_range * 24 * 60 * 60

    user_cond = self._make_user_cond(user_id, 'WHERE')

    def calc_most_concurrent(result):
        # Build a sorted start/stop event list. The 'B'/'A' suffixes make a
        # stop sort before a start at the same timestamp, so back-to-back
        # sessions are not counted as overlapping.
        events = []
        for item in result:
            events.append({'time': str(item['started']) + 'B', 'count': 1})
            events.append({'time': str(item['stopped']) + 'A', 'count': -1})

        # Sweep through the events tracking the running stream count; the
        # highest value seen is the day's peak concurrency.
        count = 0
        peak = 0
        for event in sorted(events, key=lambda k: k['time']):
            count += event['count']
            peak = max(peak, count)

        return peak

    try:
        query = 'SELECT sh.date_played, sh.started, sh.stopped, shmi.transcode_decision ' \
                'FROM (SELECT *, ' \
                'date(started, "unixepoch", "localtime") AS date_played ' \
                'FROM session_history %s) AS sh ' \
                'JOIN session_history_media_info AS shmi ON sh.id = shmi.id ' \
                'WHERE sh.stopped >= %s ' \
                'ORDER BY sh.date_played' % (user_cond, timestamp)

        result = monitor_db.select(query)
    except Exception as e:
        # BUG FIX: the original message named get_total_plays_per_stream_type.
        logger.warn("Tautulli Graphs :: Unable to execute database query for "
                    "get_total_concurrent_streams_per_stream_type: %s." % e)
        return None

    # Create our date range explicitly: days with no plays still need a
    # data point on the graph.
    base = datetime.date.today()
    date_list = [base - datetime.timedelta(days=x) for x in range(0, int(time_range))]

    # Index the grouped results by key once, instead of scanning every
    # group for every day (was accidental O(days * groups)).
    grouped_by_stream_type = {item['key']: item['value'] for item in
                              helpers.group_by_keys(result, ('date_played', 'transcode_decision'))}
    grouped_by_day = {item['key']: item['value'] for item in
                      helpers.group_by_keys(result, ['date_played'])}

    categories = []
    series_1 = []  # Direct Play
    series_2 = []  # Direct Stream ('copy')
    series_3 = []  # Transcode
    series_4 = []  # Max. concurrent streams across all types

    for date_item in sorted(date_list):
        date_string = date_item.strftime('%Y-%m-%d')
        categories.append(date_string)

        # Missing keys mean no plays that day -> empty list -> peak of 0,
        # matching the original per-series default of 0.
        series_1.append(calc_most_concurrent(grouped_by_stream_type.get((date_string, 'direct play'), [])))
        series_2.append(calc_most_concurrent(grouped_by_stream_type.get((date_string, 'copy'), [])))
        series_3.append(calc_most_concurrent(grouped_by_stream_type.get((date_string, 'transcode'), [])))
        series_4.append(calc_most_concurrent(grouped_by_day.get(date_string, [])))

    series_1_output = {'name': 'Direct Play',
                       'data': series_1}
    series_2_output = {'name': 'Direct Stream',
                       'data': series_2}
    series_3_output = {'name': 'Transcode',
                       'data': series_3}
    series_4_output = {'name': 'Max. Concurrent Streams',
                       'data': series_4}

    output = {'categories': categories,
              'series': [series_1_output, series_2_output, series_3_output, series_4_output]}
    return output
|
||||
|
||||
def get_total_plays_by_source_resolution(self, time_range='30', y_axis='plays', user_id=None, grouping=None):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
|
@ -1169,15 +1264,16 @@ class Graphs(object):
|
|||
|
||||
return output
|
||||
|
||||
def _make_user_cond(self, user_id, cond_prefix='AND'):
    """Build a SQL condition restricting session_history to certain users.

    Expects user_id to be a comma-separated list of ints.

    Parameters:
        user_id (str): Comma-separated user ids; ignored when empty.
        cond_prefix (str): Keyword to prepend ('AND' by default, or 'WHERE'
            when the condition is the first clause of the statement).

    Returns:
        str: The SQL fragment, or '' when no filter applies.
    """
    user_cond = ''

    # A session-scoped user may only see their own history, regardless of
    # the requested user_id filter.
    if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()):
        user_cond = cond_prefix + ' session_history.user_id = %s ' % session.get_session_user_id()
    elif user_id:
        user_ids = helpers.split_strip(user_id)
        # Only trust the ids if every element is purely numeric.
        if all(id.isdigit() for id in user_ids):
            user_cond = cond_prefix + ' session_history.user_id IN (%s) ' % ','.join(user_ids)

    return user_cond
|
||||
|
|
|
@ -32,6 +32,7 @@ import datetime
|
|||
from functools import reduce, wraps
|
||||
import hashlib
|
||||
import imghdr
|
||||
from itertools import groupby
|
||||
from future.moves.itertools import islice, zip_longest
|
||||
from ipaddress import ip_address, ip_network, IPv4Address
|
||||
import ipwhois
|
||||
|
@ -1241,6 +1242,11 @@ def grouper(iterable, n, fillvalue=None):
|
|||
args = [iter(iterable)] * n
|
||||
return zip_longest(fillvalue=fillvalue, *args)
|
||||
|
||||
def group_by_keys(iterable, keys):
    """Group an iterable of dicts by the given key name(s).

    Parameters:
        iterable: Iterable of mappings (e.g. database rows as dicts).
        keys: Sequence of key names to group on.

    Returns:
        list: [{'key': k, 'value': [rows...]}, ...] sorted by key. With a
        single key name, 'key' is the bare value; with several it is a
        tuple (operator.itemgetter semantics).
    """
    key_function = operator.itemgetter(*keys)

    # groupby only merges adjacent equal keys, so sort first.
    sorted_iterable = sorted(iterable, key=key_function)
    return [{'key': key, 'value': list(group)}
            for key, group in groupby(sorted_iterable, key_function)]
|
||||
|
||||
def chunk(it, size):
|
||||
it = iter(it)
|
||||
|
|
|
@ -2549,6 +2549,44 @@ class WebInterface(object):
|
|||
logger.warn("Unable to retrieve data for get_plays_by_stream_type.")
|
||||
return result
|
||||
|
||||
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth()
@addtoapi()
def get_concurrent_streams_by_stream_type(self, time_range='30', user_id=None, **kwargs):
    """ Get graph data for concurrent streams by stream type by date.

        ```
        Required parameters:
            None

        Optional parameters:
            time_range (str): The number of days of data to return
            user_id (str): Comma separated list of user id to filter the data

        Returns:
            json:
                {"categories":
                    ["YYYY-MM-DD", "YYYY-MM-DD", ...]
                 "series":
                    [{"name": "Direct Play", "data": [...]}
                     {"name": "Direct Stream", "data": [...]},
                     {"name": "Transcode", "data": [...]},
                     {"name": "Max. Concurrent Streams", "data": [...]}
                     ]
                 }
        ```
    """
    graph = graphs.Graphs()
    result = graph.get_total_concurrent_streams_per_stream_type(time_range=time_range, user_id=user_id)

    if result:
        return result
    else:
        # A None result means the underlying database query failed.
        logger.warn("Unable to retrieve data for get_concurrent_streams_by_stream_type.")
        return result
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth()
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue