Merge branch 'dev'
commit b0600402dd
56 changed files with 10480 additions and 755 deletions
CHANGELOG.md (12 changes)

@@ -1,5 +1,17 @@
 # Changelog

+## v1.2.0 (2015-09-29)
+
+* Added option to group consecutive plays in the history tables.
+* Added option for websocket monitoring (still slightly experimental and disabled by default).
+* Added global search option (searches your Plex library).
+* Added option to update any items that may have had their rating keys changed.
+* Added option to disable consecutive notifications.
+* Some visual tweaks and fixes.
+* Fix bug where monitoring wouldn't start up after first run.
+* Fix bug showing incorrect transcode decisions for music tracks on history tables.
+
+
 ## v1.1.10 (2015-09-20)

 * Added dedicated settings section for home stats configuration with ability to show/hide selected stats and sections.
PlexPy.py (12 changes)

@@ -20,7 +20,7 @@ import sys
 # Ensure lib added to path, before any other imports
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'lib/'))

-from plexpy import webstart, logger
+from plexpy import webstart, logger, web_socket

 import locale
 import time
@@ -191,6 +191,16 @@ def main():
     # Start the background threads
     plexpy.start()

+    # Open connection for websocket
+    if plexpy.CONFIG.MONITORING_USE_WEBSOCKET:
+        try:
+            web_socket.start_thread()
+        except:
+            logger.warn(u"Websocket :: Unable to open connection.")
+            # Fallback to polling
+            plexpy.POLLING_FAILOVER = True
+            plexpy.initialize_scheduler()
+
     # Open webbrowser
     if plexpy.CONFIG.LAUNCH_BROWSER and not args.nolaunch:
         plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, http_port,
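The hunk above tries the websocket connection first and reverts to the polling scheduler when it cannot be opened. Below is a minimal, self-contained sketch of that "websocket first, polling fallback" startup logic. The start_websocket and start_polling_scheduler helpers are hypothetical stand-ins for web_socket.start_thread() and plexpy.initialize_scheduler(); only the control flow mirrors the diff.

import logging

logger = logging.getLogger("plexpy")


def start_websocket():
    """Hypothetical stand-in for web_socket.start_thread()."""
    raise RuntimeError("could not reach the Plex Media Server websocket")


def start_polling_scheduler():
    """Hypothetical stand-in for plexpy.initialize_scheduler()."""
    logger.info("Monitoring :: falling back to the polling scheduler.")


def start_monitoring(use_websocket=True):
    """Mirror of the startup logic added in main() above."""
    polling_failover = False
    if use_websocket:
        try:
            start_websocket()
        except Exception:
            logger.warning("Websocket :: Unable to open connection.")
            # Fallback to polling
            polling_failover = True
            start_polling_scheduler()
    else:
        start_polling_scheduler()
    return polling_failover


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    print("polling failover engaged:", start_monitoring())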
@@ -37,7 +37,7 @@ from plexpy import version
 % elif plexpy.CONFIG.CHECK_GITHUB and plexpy.CURRENT_VERSION != plexpy.LATEST_VERSION and plexpy.COMMITS_BEHIND > 0 and plexpy.INSTALL_TYPE != 'win':
 <div id="updatebar" style="display: none;">
 A <a
-href="https://github.com/${plexpy.CONFIG.GIT_USER}/plexpy/compare/${plexpy.CURRENT_VERSION}...${plexpy.LATEST_VERSION}">
+href="https://github.com/${plexpy.CONFIG.GIT_USER}/plexpy/compare/${plexpy.CURRENT_VERSION}...${plexpy.LATEST_VERSION}" target="_blank">
 newer version</a> is available. You're ${plexpy.COMMITS_BEHIND} commits behind. <a href="update">Update</a> or
 <a href="#" id="updateDismiss">Close</a>
 </div>
@@ -57,6 +57,18 @@ from plexpy import version
 </div>
 <div class="collapse navbar-collapse navbar-right" id="navbar-collapse-1">
 <ul class="nav navbar-nav">
+<li>
+<form action="search" method="post" class="form" id="search_form">
+<div class="input-group">
+<span class="input-textbox">
+<input type="text" class="form-control" name="search_query" id="search_query" aria-label="Search" placeholder="Search..."/>
+</span>
+<span class="input-group-btn">
+<button class="btn btn-dark btn-inactive" type="submit" id="search_button"><i class="fa fa-search"></i></button>
+</span>
+</div>
+</form>
+</li>
 % if title=="Home":
 <li class="active"><a href="home"><i class="fa fa-lg fa-home"></i></a></li>
 % else:
@@ -117,6 +129,28 @@ ${next.headerIncludes()}
 $('#updatebar').show();
 }
 </script>
+<script>
+    $('#search_form').submit(function (e) {
+        if ($('#search_query').hasClass('active') && $('#search_query').val().trim() != '') {
+            $.ajax({
+                type: 'post',
+                url: 'search',
+                data: { 'query': $('#search_query').val() }
+            })
+        } else {
+            e.preventDefault();
+            $('#search_button').removeClass('btn-inactive');
+            $('#search_query').clearQueue().val('').animate({ right: '0', width: '250px' }).addClass('active').focus();
+        }
+    })
+    $('#search_query').on('blur', function (e) {
+        if ($(this).val().trim() == '') {
+            $(this).delay(200).animate({ right: '-250px', width: '0' }, function () {
+                $('#search_button').addClass('btn-inactive');
+            }).removeClass('active');
+        }
+    });
+</script>
 ${next.javascriptIncludes()}
 </body>
 </html>
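The navbar form and script above post the search text to a relative "search" endpoint. As a rough illustration of the receiving side of such a form, here is a hypothetical CherryPy-style handler; the class, method signature, and response are illustrative only and are not the handler PlexPy actually ships.

import cherrypy


class SearchStub(object):
    """Hypothetical receiver for the navbar search form above."""

    @cherrypy.expose
    def search(self, query='', search_query='', **kwargs):
        # The AJAX call posts 'query'; a plain form submit would post the
        # input's name, 'search_query'. Accept either here.
        term = (query or search_query).strip()
        if not term:
            return "Nothing to search for."
        # A real handler would query the Plex library and render a results
        # template; this stub just echoes the term.
        return "Searching the Plex library for: %s" % term


if __name__ == '__main__':
    cherrypy.quickstart(SearchStub(), '/')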
@@ -1101,7 +1101,7 @@ a:hover .dashboard-recent-media-cover {
     display: block;
     width: 100%;
     height: 100%;
-    background-image: url(../images/plex-logo-light.svg);
+    background-image: url(../images/plex-logo-light-small.png);
     background-size: 100px;
     background-repeat: no-repeat;
     background-position: center;
@@ -1246,11 +1246,39 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span {
     background-size: contain;
     height: 16px;
 }
+#children-list, #search-results-list {
+    position: relative;
+    z-index: 0;
+}
 .item-children-wrapper {
 }
+.item-children-section-title {
+    position: relative;
+    padding: 10px;
+    background-color: #2c2c2c;
+    border-bottom: 1px solid #3d3d3d;
+    border-top: 1px solid #282828;
+    height: 50px;
+    line-height: 22px;
+    padding: 13px 20px;
+    margin: 20px 0;
+}
+.item-children-section-title h4 {
+    position: relative;
+    margin: 0;
+    line-height: 22px;
+    color: #fff;
+    font-size: 16px;
+    text-align: center;
+    text-transform: uppercase;
+    overflow: hidden;
+    text-overflow: ellipsis;
+    white-space: nowrap;
+}
 .item-children-instance {
     list-style: none;
     margin: 0;
+    overflow: hidden;
 }
 .item-children-instance li {
     float: left;
@@ -1348,6 +1376,9 @@ a:hover .item-children-poster {
     text-align: left;
     clear: both;
 }
+.item-children-instance-text-wrapper h3.text-muted {
+    color: #777;
+}
 .item-children-list-item-odd {
     border-top: 0px solid #343434;
     border-bottom: 0px solid #343434;
@@ -1391,6 +1422,13 @@ a:hover .item-children-poster {
     width: 40px;
     margin-right: 20px;
 }
+#new_title h3 {
+    color: #F9AA03;
+    font-size: 14px;
+    line-height: 1.42857143;
+    font-weight: bold;
+    margin: 0;
+}
 .settings-alert {
     float: left;
     padding: 0;
@@ -2377,4 +2415,72 @@ a .home-platforms-instance-list-oval:hover,
     .dashboard-instance {
         width: 100%;
     }
+}
+table.display tr.shown + tr div.slider {
+    display: none;
+}
+table.display tr.shown + tr > td {
+    padding-top: 0;
+    padding-bottom: 0;
+}
+table.display tr.shown + tr:hover {
+    background-color: rgba(255,255,255,0);
+}
+table.display tr.shown + tr:hover a,
+table.display tr.shown + tr td:hover a,
+table.display tr.shown + tr .pagination > .active > a,
+table.display tr.shown + tr .pagination > .active > a:hover {
+    color: #fff;
+}
+table.display tr.shown + tr table[id^='history_child'] td:hover a {
+    color: #F9AA03;
+}
+table.display tr.shown + tr .pagination > .disabled > a {
+    color: #444444;
+}
+table.display tr.shown + tr .pagination > li > a:hover {
+    color: #23527c;
+}
+table[id^='history_child'] {
+    margin-top: 0;
+    margin-left: -4px;
+    opacity: .6;
+}
+table[id^='history_child'] thead th {
+    line-height: 0;
+    height: 0 !important;
+    overflow: hidden;
+}
+#search_form {
+    width: 350px;
+    padding: 8px 15px;
+}
+#search_form span.input-textbox {
+    overflow: hidden;
+    width: 250px;
+    height: 34px;
+    display: inline-flex;
+    float: right;
+}
+#search_form #search_query {
+    width: 0;
+    height: 34px;
+    margin-top: 0;
+    float: right;
+    position: relative;
+    right: -250px;
+    border-radius: 3px 0 0 3px;
+}
+#search_form #search_query.active {
+    width: 250px;
+    right: 0px;
+}
+#search_form #search_button.btn-inactive {
+    background-color: #000;
+    border: 1px solid rgba(0,0,0,0);
+    -webkit-transition: background 0.3s;
+    -moz-transition: background 0.3s;
+    -ms-transition: background 0.3s;
+    -o-transition: background 0.3s;
+    transition: background 0.3s;
 }
@@ -19,7 +19,7 @@ session_key          Returns a unique session id for the active stream
 rating_key           Returns the unique identifier for the media item.
 media_index          Returns the index of the media item.
 parent_media_index   Returns the index of the media item's parent.
-type                 Returns the type of session. Either 'track', 'episode' or 'movie'.
+media_type           Returns the type of session. Either 'track', 'episode' or 'movie'.
 thumb                Returns the location of the item's thumbnail. Use with pms_image_proxy.
 bif_thumb            Returns the location of the item's bif thumbnail. Use with pms_image_proxy.
 art                  Returns the location of the item's artwork
@@ -67,21 +67,21 @@ DOCUMENTATION :: END
 % if data['stream_count'] != '0':
 % for a in data['sessions']:
 <div class="dashboard-instance" id="instance-${a['session_key']}">
-% if a['type'] == 'movie' or a['type'] == 'episode' or a['type'] == 'track':
+% if a['media_type'] == 'movie' or a['media_type'] == 'episode' or a['media_type'] == 'track':
 <a href="info?item_id=${a['rating_key']}">
 % endif
 <div class="dashboard-activity-poster">
-% if a['type'] == 'movie' and not a['indexes']:
+% if a['media_type'] == 'movie' and not a['indexes']:
 <div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${a['art']}&width=500&height=280);"></div>
-% elif a['type'] == 'episode' and not a['indexes']:
+% elif a['media_type'] == 'episode' and not a['indexes']:
 <div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${a['art']}&width=500&height=280);"></div>
 % elif a['indexes']:
 <div class="dashboard-activity-poster-face bif" style="background-image: url(pms_image_proxy?img=${a['bif_thumb']}&width=500&height=280); display: none;"></div>
 % else:
-% if a['type'] == 'track':
+% if a['media_type'] == 'track':
 <div class="dashboard-activity-cover-face-bg" style="background-image: url(pms_image_proxy?img=${a['thumb']}&width=300&height=300);"></div>
 <div class="dashboard-activity-cover-face" style="background-image: url(pms_image_proxy?img=${a['thumb']}&width=300&height=300);"></div>
-% elif a['type'] == 'clip':
+% elif a['media_type'] == 'clip':
 % if a['art'][:4] == 'http':
 <div class="dashboard-activity-poster-face" style="background-image: url(${a['art']});"></div>
 % elif a['thumb'][:4] == 'http':
@@ -93,7 +93,7 @@ DOCUMENTATION :: END
 <div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${a['thumb']}&width=500&height=280);"></div>
 % endif
 % endif
-% elif a['type'] == 'photo':
+% elif a['media_type'] == 'photo':
 <div class="dashboard-activity-poster-face bif" style="background-image: url(pms_image_proxy?img=${a['thumb']}&width=500&height=500);"></div>
 % else:
 <div class="dashboard-activity-cover-face" style="background-image: url(pms_image_proxy?img=${a['thumb']}&width=300&height=300&fallback=cover);"></div>
@@ -106,7 +106,12 @@ DOCUMENTATION :: END
 </div>
 <div id="stream-${a['session_key']}" class="dashboard-activity-info-details-overlay">
 <div class="dashboard-activity-info-details-content">
-<div class="dashboard-activity-info-platform" id="platform-${a['session_key']}">
+<div id="platform-${a['session_key']}" title="${a['platform']}">
+<script>
+$("#platform-${a['session_key']}").html("<div class='dashboard-activity-info-platform-box' style='background-image: url(" + getPlatformImagePath('${a['platform']}') + ");'>");
+</script>
+</div>
+<div class="dashboard-activity-info-platform">
 <strong>${a['player']}</strong><br />
 % if a['state'] == 'playing':
 State <strong>Playing</strong>
@@ -116,7 +121,7 @@ DOCUMENTATION :: END
 State <strong>Buffering</strong>
 % endif
 </div>
-% if a['type'] == 'track':
+% if a['media_type'] == 'track':
 % if a['audio_decision'] == 'direct play':
 Stream <strong>Direct Play</strong>
 % elif a['audio_decision'] == 'copy':
@@ -137,7 +142,7 @@ DOCUMENTATION :: END
 % elif a['audio_decision'] != 'transcode':
 Audio <strong>Transcode (${a['transcode_audio_codec']}) (${a['transcode_audio_channels']}ch)</strong>
 % endif
-% elif a['type'] == 'episode' or a['type'] == 'movie' or a['type'] == 'clip':
+% elif a['media_type'] == 'episode' or a['media_type'] == 'movie' or a['media_type'] == 'clip':
 % if a['video_decision'] == 'direct play' and a['audio_decision'] == 'direct play':
 Stream <strong>Direct Play</strong>
 % elif a['video_decision'] == 'copy' and a['audio_decision'] == 'copy':
@@ -166,7 +171,7 @@ DOCUMENTATION :: END
 % elif a['audio_decision'] == 'transcode':
 Audio <strong>Transcode (${a['transcode_audio_codec']}) (${a['transcode_audio_channels']}ch)</strong>
 % endif
-% elif a['type'] == 'photo':
+% elif a['media_type'] == 'photo':
 % if a['video_decision'] == 'direct play':
 Stream <strong>Direct Play</strong>
 % elif a['video_decision'] == 'copy':
@@ -184,15 +189,15 @@ DOCUMENTATION :: END
 <br>
 </div>
 </div>
-% if a['type'] != 'photo':
+% if a['media_type'] != 'photo':
 <div class="dashboard-activity-poster-info-bar">
 <div class="dashboard-activity-poster-info-time">
-<span class="progress_time">${a['progress']}</span>/<span class="progress_time">${a['duration']}</span>
+<span class="progress_time">${a['view_offset']}</span>/<span class="progress_time">${a['duration']}</span>
 </div>
 </div>
 % endif
 </div>
-% if a['type'] == 'movie' or a['type'] == 'episode' or a['type'] == 'track':
+% if a['media_type'] == 'movie' or a['media_type'] == 'episode' or a['media_type'] == 'track':
 </a>
 % endif
 <div class="dashboard-activity-progress">
@@ -213,45 +218,42 @@ DOCUMENTATION :: END
 % elif a['state'] == 'buffering':
 <i class="fa fa-spinner"></i>
 % endif
-% if a['type'] == 'episode':
-<a href="info?item_id=${a['rating_key']}">${a['grandparent_title']} - ${a['title']}</a>
-% elif a['type'] == 'movie':
-<a href="info?item_id=${a['rating_key']}">${a['title']}</a>
-% elif a['type'] == 'clip':
-${a['title']}
-% elif a['type'] == 'track':
-<a href="info?item_id=${a['rating_key']}">${a['grandparent_title']} - ${a['title']}</a>
-% elif a['type'] == 'photo':
-${a['parent_title']}
+% if a['media_type'] == 'episode':
+<a href="info?item_id=${a['rating_key']}" title="${a['grandparent_title']} - ${a['title']}">${a['grandparent_title']} - ${a['title']}</a>
+% elif a['media_type'] == 'movie':
+<a href="info?item_id=${a['rating_key']}" title="${a['title']}">${a['title']}</a>
+% elif a['media_type'] == 'clip':
+<span title="${a['title']}">${a['title']}</span>
+% elif a['media_type'] == 'track':
+<a href="info?item_id=${a['rating_key']}" title="${a['grandparent_title']} - ${a['title']}">${a['grandparent_title']} - ${a['title']}</a>
+% elif a['media_type'] == 'photo':
+<span title="${a['parent_title']}">${a['parent_title']}</span>
 % else:
-${a['title']}
+<span title="${a['title']}">${a['title']}</span>
 % endif
 </div>
 <div class="dashboard-activity-metadata-subtitle">
-% if a['type'] == 'episode':
-S${a['parent_media_index']} · E${a['media_index']}
-% elif a['type'] == 'movie':
-${a['year']}
-% elif a['type'] == 'track':
-<a href="info?item_id=${a['parent_rating_key']}">${a['parent_title']}</a>
-% elif a['type'] == 'photo':
-${a['title']}
+% if a['media_type'] == 'episode':
+<span title="S${a['parent_media_index']} · E${a['media_index']}">S${a['parent_media_index']} · E${a['media_index']}</span>
+% elif a['media_type'] == 'movie':
+<span title="${a['year']}">${a['year']}</span>
+% elif a['media_type'] == 'track':
+<a href="info?item_id=${a['parent_rating_key']}" title="${a['parent_title']}">${a['parent_title']}</a>
+% elif a['media_type'] == 'photo':
+<span title="${a['title']}">${a['title']}</span>
 % else:
-${a['year']}
+<span title="${a['year']}">${a['year']}</span>
 % endif
 </div>
 <div class="dashboard-activity-metadata-user">
 % if a['user_id']:
-<a href="user?user_id=${a['user_id']}">${a['friendly_name']}</a>
+<a href="user?user_id=${a['user_id']}" title="${a['friendly_name']}">${a['friendly_name']}</a>
 % else:
-<a href="user?user=${a['user']}">${a['friendly_name']}</a>
+<a href="user?user=${a['user']}" title="${a['friendly_name']}">${a['friendly_name']}</a>
 % endif
 </div>
 </div>
 </div>
-<script>
-$("#platform-${a['session_key']}").prepend("<div class='dashboard-activity-info-platform-box' style='background-image: url(" + getPlatformImagePath('${a['platform']}') + ");'>");
-</script>

 % endfor
 <script>
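The activity template above branches on the session's media_type and the other keys documented at the top of this diff (view_offset, duration, friendly_name, and so on). As a quick illustration of how those keys drive rendering, here is a hypothetical Python helper that formats one activity line from a session dict; only the key names mirror the documented parameters, everything else is made up for the example.

def describe_session(session):
    """Build a one-line summary from a PlexPy-style session dict (illustrative only)."""
    media_type = session.get('media_type')
    if media_type in ('episode', 'track'):
        title = "%s - %s" % (session['grandparent_title'], session['title'])
    elif media_type == 'photo':
        title = session['parent_title']
    else:  # movie, clip, or anything unrecognised
        title = session['title']

    progress = "%s/%s" % (session.get('view_offset', '0:00'),
                          session.get('duration', '?'))
    return "%s is playing %s (%s) [%s]" % (
        session.get('friendly_name', 'Unknown user'), title, media_type, progress)


if __name__ == '__main__':
    example = {
        'media_type': 'episode',
        'grandparent_title': 'Some Show',
        'title': 'Some Episode',
        'friendly_name': 'alice',
        'view_offset': '12:34',
        'duration': '42:00',
    }
    print(describe_session(example))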
@@ -38,6 +38,7 @@
 type: "post",
 data: function ( d ) {
     return { 'json_data': JSON.stringify( d ),
+        'grouping': false,
         'start_date': '${data}'
     };
 }
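This DataTables request now sends a grouping flag alongside the serialized table state, matching the new "group consecutive plays" option from the changelog. The snippet below is a hypothetical illustration of what grouping consecutive plays means on the data side (collapsing adjacent history rows for the same user and item); it is not PlexPy's actual query logic.

from itertools import groupby


def group_consecutive_plays(rows, grouping=True):
    """Collapse consecutive history rows for the same user and item.

    Each row is a dict; only 'user_id' and 'rating_key' are assumed here,
    mirroring the history table's identifiers.
    """
    if not grouping:
        return list(rows)

    grouped = []
    for _, run in groupby(rows, key=lambda r: (r['user_id'], r['rating_key'])):
        run = list(run)
        first = dict(run[0])
        # Keep one row per run and remember how many plays it represents.
        first['group_count'] = len(run)
        grouped.append(first)
    return grouped


if __name__ == '__main__':
    history = [
        {'user_id': 1, 'rating_key': 100, 'title': 'Pilot'},
        {'user_id': 1, 'rating_key': 100, 'title': 'Pilot'},
        {'user_id': 1, 'rating_key': 101, 'title': 'Episode 2'},
    ]
    for row in group_consecutive_plays(history):
        print(row['title'], 'x', row['group_count'])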
@@ -77,7 +77,7 @@ DOCUMENTATION :: END
 </div>
 <div class="home-platforms-instance-playcount">
 <h4>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 ${top_stat['rows'][0]['title']}
 </a>
 </h4>
@@ -89,7 +89,7 @@ DOCUMENTATION :: END
 % endif
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 % if top_stat['rows'][0]['grandparent_thumb']:
 <div class="home-platforms-instance-poster">
 <div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -111,7 +111,7 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-info">
 <div class="home-platforms-instance-list-name">
 <h5>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 ${top_stat['rows'][loop.index]['title']}
 </a>
 </h5>
@@ -125,7 +125,7 @@ DOCUMENTATION :: END
 % endif
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 % if top_stat['rows'][loop.index]['grandparent_thumb']:
 <div class="home-platforms-instance-list-poster">
 <div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -157,7 +157,7 @@ DOCUMENTATION :: END
 </div>
 <div class="home-platforms-instance-playcount">
 <h4>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 ${top_stat['rows'][0]['title']}
 </a>
 </h4>
@@ -165,7 +165,7 @@ DOCUMENTATION :: END
 <p> users</p>
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 % if top_stat['rows'][0]['grandparent_thumb'] != '':
 <div class="home-platforms-instance-poster">
 <div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -187,7 +187,7 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-info">
 <div class="home-platforms-instance-list-name">
 <h5>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 ${top_stat['rows'][loop.index]['title']}
 </a>
 </h5>
@@ -197,7 +197,7 @@ DOCUMENTATION :: END
 <p> users</p>
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 % if top_stat['rows'][loop.index]['grandparent_thumb']:
 <div class="home-platforms-instance-list-poster">
 <div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -229,7 +229,7 @@ DOCUMENTATION :: END
 </div>
 <div class="home-platforms-instance-playcount">
 <h4>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 ${top_stat['rows'][0]['title']}
 </a>
 </h4>
@@ -241,7 +241,7 @@ DOCUMENTATION :: END
 % endif
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 % if top_stat['rows'][0]['thumb']:
 <div class="home-platforms-instance-poster">
 <div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -263,7 +263,7 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-info">
 <div class="home-platforms-instance-list-name">
 <h5>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 ${top_stat['rows'][loop.index]['title']}
 </a>
 </h5>
@@ -277,7 +277,7 @@ DOCUMENTATION :: END
 % endif
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 % if top_stat['rows'][loop.index]['thumb']:
 <div class="home-platforms-instance-list-poster">
 <div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -309,7 +309,7 @@ DOCUMENTATION :: END
 </div>
 <div class="home-platforms-instance-playcount">
 <h4>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 ${top_stat['rows'][0]['title']}
 </a>
 </h4>
@@ -317,7 +317,7 @@ DOCUMENTATION :: END
 <p> users</p>
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 % if top_stat['rows'][0]['thumb']:
 <div class="home-platforms-instance-poster">
 <div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -339,7 +339,7 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-info">
 <div class="home-platforms-instance-list-name">
 <h5>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 ${top_stat['rows'][loop.index]['title']}
 </a>
 </h5>
@@ -349,7 +349,7 @@ DOCUMENTATION :: END
 <p> users</p>
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 % if top_stat['rows'][loop.index]['thumb']:
 <div class="home-platforms-instance-list-poster">
 <div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -381,7 +381,7 @@ DOCUMENTATION :: END
 </div>
 <div class="home-platforms-instance-playcount">
 <h4>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 ${top_stat['rows'][0]['title']}
 </a>
 </h4>
@@ -393,7 +393,7 @@ DOCUMENTATION :: END
 % endif
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 % if top_stat['rows'][0]['grandparent_thumb']:
 <div class="home-platforms-instance-poster">
 <div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
@@ -415,7 +415,7 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-info">
 <div class="home-platforms-instance-list-name">
 <h5>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 ${top_stat['rows'][loop.index]['title']}
 </a>
 </h5>
@@ -429,7 +429,7 @@ DOCUMENTATION :: END
 % endif
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 % if top_stat['rows'][loop.index]['grandparent_thumb']:
 <div class="home-platforms-instance-list-poster">
 <div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
@@ -461,7 +461,7 @@ DOCUMENTATION :: END
 </div>
 <div class="home-platforms-instance-playcount">
 <h4>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 ${top_stat['rows'][0]['title']}
 </a>
 </h4>
@@ -469,7 +469,7 @@ DOCUMENTATION :: END
 <p> users</p>
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
 % if top_stat['rows'][0]['grandparent_thumb'] != '':
 <div class="home-platforms-instance-poster">
 <div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
@@ -491,7 +491,7 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-info">
 <div class="home-platforms-instance-list-name">
 <h5>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 ${top_stat['rows'][loop.index]['title']}
 </a>
 </h5>
@@ -501,7 +501,7 @@ DOCUMENTATION :: END
 <p> users</p>
 </div>
 </div>
-<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}">
+<a href="info?item_id=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
 % if top_stat['rows'][loop.index]['grandparent_thumb']:
 <div class="home-platforms-instance-list-poster">
 <div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
@@ -534,12 +534,12 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-playcount">
 <h4>
 % if top_stat['rows'][0]['user_id']:
-<a href="user?user_id=${top_stat['rows'][0]['user_id']}">
+<a href="user?user_id=${top_stat['rows'][0]['user_id']}" title="${top_stat['rows'][0]['friendly_name']}">
 % else:
-<a href="user?user=${top_stat['rows'][0]['user']}">
+<a href="user?user=${top_stat['rows'][0]['user']}" title="${top_stat['rows'][0]['friendly_name']}">
 % endif
 ${top_stat['rows'][0]['friendly_name']}
 </a>
 </h4>
 % if top_stat['stat_type'] == 'total_plays':
 <h3>${top_stat['rows'][0]['total_plays']}</h3>
@@ -550,9 +550,9 @@ DOCUMENTATION :: END
 </div>
 </div>
 % if top_stat['rows'][0]['user_id']:
-<a href="user?user_id=${top_stat['rows'][0]['user_id']}">
+<a href="user?user_id=${top_stat['rows'][0]['user_id']}" title="${top_stat['rows'][0]['friendly_name']}">
 % else:
-<a href="user?user=${top_stat['rows'][0]['user']}">
+<a href="user?user=${top_stat['rows'][0]['user']}" title="${top_stat['rows'][0]['friendly_name']}">
 % endif
 % if top_stat['rows'][0]['user_thumb'] != '':
 <div class="home-platforms-instance-poster">
@@ -576,12 +576,12 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-name">
 <h5>
 % if top_stat['rows'][loop.index]['user_id']:
-<a href="user?user_id=${top_stat['rows'][loop.index]['user_id']}">
+<a href="user?user_id=${top_stat['rows'][loop.index]['user_id']}" title="${top_stat['rows'][loop.index]['friendly_name']}">
 % else:
-<a href="user?user=${top_stat['rows'][loop.index]['user']}">
+<a href="user?user=${top_stat['rows'][loop.index]['user']}" title="${top_stat['rows'][loop.index]['friendly_name']}">
 % endif
 ${top_stat['rows'][loop.index]['friendly_name']}
 </a>
 </h5>
 </div>
 <div class="home-platforms-instance-list-playcount">
@@ -594,9 +594,9 @@ DOCUMENTATION :: END
 </div>
 </div>
 % if top_stat['rows'][loop.index]['user_id']:
-<a href="user?user_id=${top_stat['rows'][loop.index]['user_id']}">
+<a href="user?user_id=${top_stat['rows'][loop.index]['user_id']}" title="${top_stat['rows'][loop.index]['friendly_name']}">
 % else:
-<a href="user?user=${top_stat['rows'][loop.index]['user']}">
+<a href="user?user=${top_stat['rows'][loop.index]['user']}" title="${top_stat['rows'][loop.index]['friendly_name']}">
 % endif
 % if top_stat['rows'][loop.index]['user_thumb'] != '':
 <div class="home-platforms-instance-poster">
@@ -628,7 +628,7 @@ DOCUMENTATION :: END
 <h4>Most Active Platform</h4>
 </div>
 <div class="home-platforms-instance-playcount">
-<h4>${top_stat['rows'][0]['platform_type']}</h4>
+<h4 title="${top_stat['rows'][0]['platform_type']}">${top_stat['rows'][0]['platform_type']}</h4>
 % if top_stat['stat_type'] == 'total_plays':
 <h3>${top_stat['rows'][0]['total_plays']}</h3>
 <p> plays</p>
@@ -637,7 +637,7 @@ DOCUMENTATION :: END
 % endif
 </div>
 </div>
-<div id="platform-stat" class="home-platforms-instance-poster">
+<div id="platform-stat" class="home-platforms-instance-poster" title="${top_stat['rows'][0]['platform_type']}">
 <script>
 $("#platform-stat").html("<div class='home-platforms-instance-box' style='background-image: url(" + getPlatformImagePath('${top_stat['rows'][0]['platform_type']}') + ");'>");
 </script>
@@ -652,7 +652,7 @@ DOCUMENTATION :: END
 <li>
 <div class="home-platforms-instance-list-info">
 <div class="home-platforms-instance-list-name">
-<h5>
+<h5 title="${top_stat['rows'][loop.index]['platform_type']}">
 ${top_stat['rows'][loop.index]['platform_type']}
 </h5>
 </div>
@@ -665,7 +665,7 @@ DOCUMENTATION :: END
 % endif
 </div>
 </div>
-<div class="home-platforms-instance-poster" id="home-platforms-instance-poster-${loop.index + 1}">
+<div class="home-platforms-instance-poster" id="home-platforms-instance-poster-${loop.index + 1}" title="${top_stat['rows'][loop.index]['platform_type']}">
 <script>
 $("#home-platforms-instance-poster-${loop.index + 1}").html("<div class='home-platforms-instance-list-box' style='background-image: url(" + getPlatformImagePath('${top_stat['rows'][loop.index]['platform_type']}') + ");'>");
 </script>
@@ -691,16 +691,16 @@ DOCUMENTATION :: END
 </div>
 <div class="home-platforms-instance-last-user">
 <h4>
-<a href="info?source=history&item_id=${top_stat['rows'][0]['row_id']}">
+<a href="info?source=history&item_id=${top_stat['rows'][0]['row_id']}" title="${top_stat['rows'][0]['title']}">
 ${top_stat['rows'][0]['title']}
 </a>
 </h4>
 <h5>
 % if top_stat['rows'][0]['user_id']:
-<a href="user?user_id=${top_stat['rows'][0]['user_id']}">
+<a href="user?user_id=${top_stat['rows'][0]['user_id']}" title="${top_stat['rows'][0]['friendly_name']}">
 % else:
-<a href="user?user=${top_stat['rows'][0]['user']}">
+<a href="user?user=${top_stat['rows'][0]['user']}" title="${top_stat['rows'][0]['friendly_name']}">
 % endif
 ${top_stat['rows'][0]['friendly_name']}
 </a>
 </h5>
@@ -713,7 +713,7 @@ DOCUMENTATION :: END
 </p>
 </div>
 </div>
-<a href="info?source=history&item_id=${top_stat['rows'][0]['row_id']}">
+<a href="info?source=history&item_id=${top_stat['rows'][0]['row_id']}" title="${top_stat['rows'][0]['title']}">
 % if top_stat['rows'][0]['thumb']:
 <div class="home-platforms-instance-poster">
 <div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
@@ -735,7 +735,7 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-info">
 <div class="home-platforms-instance-list-name">
 <h5>
-<a href="info?source=history&item_id=${top_stat['rows'][loop.index]['row_id']}">
+<a href="info?source=history&item_id=${top_stat['rows'][loop.index]['row_id']}" title="${top_stat['rows'][loop.index]['title']}">
 ${top_stat['rows'][loop.index]['title']}
 </a>
 </h5>
@@ -743,10 +743,10 @@ DOCUMENTATION :: END
 <div class="home-platforms-instance-list-last-user">
 <h5>
 % if top_stat['rows'][loop.index]['user_id']:
-<a href="user?user_id=${top_stat['rows'][loop.index]['user_id']}">
+<a href="user?user_id=${top_stat['rows'][loop.index]['user_id']}" title="${top_stat['rows'][loop.index]['friendly_name']}">
 % else:
-<a href="user?user=${top_stat['rows'][loop.index]['user']}">
+<a href="user?user=${top_stat['rows'][loop.index]['user']}" title="${top_stat['rows'][loop.index]['friendly_name']}">
 % endif
 ${top_stat['rows'][loop.index]['friendly_name']}
 </a>
 </h5>
@@ -759,7 +759,7 @@ DOCUMENTATION :: END
 </p>
 </div>
 </div>
-<a href="info?source=history&item_id=${top_stat['rows'][loop.index]['row_id']}">
+<a href="info?source=history&item_id=${top_stat['rows'][loop.index]['row_id']}" title="${top_stat['rows'][loop.index]['title']}">
 % if top_stat['rows'][loop.index]['thumb']:
 <div class="home-platforms-instance-list-poster">
 <div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
data/interfaces/default/images/plex-logo-light-small.png (new binary file, 1.4 KiB; binary file not shown)
@@ -11,27 +11,40 @@ data :: Usable parameters (if not applicable for media type, blank value will be
== Global keys ==
rating_key              Returns the unique identifier for the media item.
-type                    Returns the type of media. Either 'movie', 'episode' or 'show' or 'season'.
+type                    Returns the type of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'.
art                     Returns the location of the item's artwork
-title                   Returns the name of the episode, show, season or movie.
+title                   Returns the name of the movie, show, episode, artist, album, or track.
duration                Returns the standard runtime of the media.
content_rating          Returns the age rating for the media.
summary                 Returns a brief description of the media plot.
-grandparent_title       Returns the name of the TV show.
+grandparent_title       Returns the name of the show, or artist.
-parent_index            Returns the season number of the TV show.
+parent_index            Returns the index number of the season.
-index                   Returns the episode number.
+index                   Returns the index number of the episode, or track.
parent_thumb            Returns the location of the item's thumbnail. Use with pms_image_proxy.
writers                 Returns an array of writers.
thumb                   Returns the location of the item's thumbnail. Use with pms_image_proxy.
-parent_title            Returns the name of the TV show.
+parent_title            Returns the name of the show, or artist.
rating                  Returns the 5 star rating value for the movie. Between 1 and 5.
-year                    Returns the release year of the movie.
+year                    Returns the release year of the movie, or show.
genres                  Returns an array of genres.
actors                  Returns an array of actors.
directors               Returns an array of directors.
studio                  Returns the name of the studio.
originally_available_at Returns the air date of the item.
+
+query :: Usable parameters
+
+== Global keys ==
+query_string            Returns the string used for the search query.
+title                   Returns the name of the movie, episode, or track.
+parent_title            Returns the name of the album.
+grandparent_title       Returns the name of the show, or artist.
+media_index             Returns the index number of the episode, or track.
+parent_media_index      Returns the index number of the season.
+year                    Returns the release year of the movie, or show.
+media_type              Returns the type of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'.
+rating_key              Returns the unique identifier for the media item.

DOCUMENTATION :: END
</%doc>
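For illustration only (not part of this commit), a sketch of how a consumer of the query metadata documented above might branch on the expanded media_type values; the formatMediaLabel helper and the metadata object shape are assumptions:

    // Hypothetical helper using only keys documented above (media_type, title, year,
    // grandparent_title, parent_media_index, media_index).
    function formatMediaLabel(metadata) {
        switch (metadata.media_type) {
            case 'movie':   return metadata.title + ' (' + metadata.year + ')';
            case 'episode': return metadata.grandparent_title + ' - S' + metadata.parent_media_index + ' E' + metadata.media_index;
            case 'track':   return metadata.grandparent_title + ' - ' + metadata.title;
            default:        return metadata.title;
        }
    }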
@@ -261,7 +274,7 @@ DOCUMENTATION :: END
</div>
</div>
<div class='table-card-back'>
-<div id="children-list"></div>
+<div id="children-list"><i class="fa fa-refresh fa-spin"></i> Loading season list...</div>
</div>
</div>
% elif data['type'] == 'season':
@@ -272,7 +285,7 @@ DOCUMENTATION :: END
</div>
</div>
<div class='table-card-back'>
-<div id="children-list"></div>
+<div id="children-list"><i class="fa fa-refresh fa-spin"></i> Loading episode list...</div>
</div>
</div>
% elif data['type'] == 'artist':
@@ -283,7 +296,7 @@ DOCUMENTATION :: END
</div>
</div>
<div class='table-card-back'>
-<div id="children-list"></div>
+<div id="children-list"><i class="fa fa-refresh fa-spin"></i> Loading album list...</div>
</div>
</div>
% elif data['type'] == 'album':
@@ -294,7 +307,7 @@ DOCUMENTATION :: END
</div>
</div>
<div class='table-card-back'>
-<div id="children-list"></div>
+<div id="children-list"><i class="fa fa-refresh fa-spin"></i> Loading track list...</div>
</div>
</div>
% endif
@@ -361,11 +374,122 @@ DOCUMENTATION :: END
% else:
<div class="container-fluid">
<div class="row">
-<div class="col-md-10">
-<h3>
-Error retrieving item data. This media may not be available in the Plex Media Server database
-anymore.
-</h3>
+<div class="summary-container">
+<div class="summary-navbar">
+<div class="col-md-12">
+<div class="summary-navbar-list">
+% if query:
+% if query['media_type'] == 'movie':
+<span>Movies</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span>${query['title']}</span>
+% elif query['media_type'] == 'show':
+<span>TV Shows</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span>${query['grandparent_title']}</span>
+% elif query['media_type'] == 'season':
+<span class="hidden-xs hidden-sm">TV Shows</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span class="hidden-xs hidden-sm">${query['grandparent_title']}</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span>Season ${query['parent_media_index']}</span>
+% elif query['media_type'] == 'episode':
+<span class="hidden-xs hidden-sm">TV Shows</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span class="hidden-xs hidden-sm">${query['grandparent_title']}</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span>Season ${query['parent_media_index']}</span>
+<span><i class="fa fa-chevron-right"></i></span>
+<span>Episode ${query['media_index']} - ${query['title']}</span>
+% elif query['media_type'] == 'artist':
+<span>Music</span>
+<span><i class="fa fa-chevron-right"></i></span>
+<span>${query['grandparent_title']}</span>
+% elif query['media_type'] == 'album':
+<span class="hidden-xs hidden-sm">Music</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span>${query['grandparent_title']}</span>
+<span><i class="fa fa-chevron-right"></i></span>
+<span>${query['parent_title']}</span>
+% elif query['media_type'] == 'track':
+<span class="hidden-xs hidden-sm">Music</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span class="hidden-xs hidden-sm">${query['grandparent_title']}</span>
+<span class="hidden-xs hidden-sm"><i class="fa fa-chevron-right"></i></span>
+<span>${query['parent_title']}</span>
+<span><i class="fa fa-chevron-right"></i></span>
+<span>Track ${query['media_index']} - ${query['title']}</span>
+% endif
+% endif
+</div>
+</div>
+</div>
+<div class="summary-content-title-wrapper">
+<div class="col-md-12">
+<h4 style="text-align: center; margin-bottom: 20px;">
+Error retrieving item metadata. This media item is not available in the Plex Media Server library.
+</h4>
+% if query:
+<h4 style="text-align: center; margin-bottom: 20px;">
+If the item has been moved, please select the correct match below to update the PlexPy database.
+</h4>
+% endif
+</div>
+</div>
+<div class="summary-content-wrapper">
+<div class='col-md-12'>
+% if query:
+<div class='table-card-header'>
+<div class="header-bar">
+<span>Search Results for <strong>${query['query_string']}</strong></span>
+</div>
+</div>
+<div class='table-card-back'>
+<div id="search-results-list"><i class="fa fa-refresh fa-spin"></i> Loading search results...</div>
+</div>
+<div class="modal fade" id="confirm-modal" tabindex="-1" role="dialog" aria-labelledby="confirm-modal">
+<div class="modal-dialog">
+<div class="modal-content">
+<div class="modal-header">
+<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
+<h4 class="modal-title" id="myModalLabel">Confirm Update</h4>
+</div>
+<div class="modal-body" style="text-align: center;">
+<p>Are you REALLY sure you want to replace
+<p><strong>
+% if query['media_type'] == 'movie':
+${query['title']}<br />${query['year']}
+% elif query['media_type'] == 'show':
+${query['grandparent_title']}
+% elif query['media_type'] == 'season':
+${query['grandparent_title']}<br />S${query['parent_media_index']}
+% elif query['media_type'] == 'episode':
+${query['grandparent_title']}<br />${query['title']}<br />S${query['parent_media_index']} · E${query['media_index']}
+% elif query['media_type'] == 'artist':
+${query['grandparent_title']}
+% elif query['media_type'] == 'album':
+${query['grandparent_title']}<br />${query['parent_title']}
+% elif query['media_type'] == 'track':
+${query['grandparent_title']}<br />${query['title']}<br />${query['parent_title']}
+% endif
+</strong></p>
+<p> with </p>
+<p><span id="new_title"></span></p>
+% if query['media_type'] != 'movie':
+<p>All items for <strong>${query['grandparent_title']}</strong> will also be updated.</p>
+% endif
+<p>This is permanent and cannot be undone!</p>
+</div>
+<div class="modal-footer">
+<button type="button" class="btn btn-dark" data-dismiss="modal">Cancel</button>
+<button type="button" class="btn btn-danger btn-ok" data-dismiss="modal" id="confirm-update">Update</button>
+</div>
+</div>
+</div>
+</div>
+% endif
+</div>
+</div>
</div>
</div>
</div>
@@ -381,13 +505,6 @@ DOCUMENTATION :: END
<script src="interfaces/default/js/moment-with-locale.js"></script>

% if data:
-% if data['type'] == 'movie' or data['type'] == 'show' or data['type'] == 'episode':
-<script>
-// Convert rating to 5 star rating type
-var starRating = Math.round(${data['rating']} / 2);
-$('#stars').attr('data-rateit-value', starRating);
-</script>
-% endif
<script src="interfaces/default/js/tables/history_table.js"></script>
% if data['type'] == 'show' or data['type'] == 'artist':
<script>
@@ -488,10 +605,57 @@ DOCUMENTATION :: END
});
</script>
% endif
+% if data['rating']:
+<script>
+// Convert rating to 5 star rating type
+var starRating = Math.round(${data['rating']} / 2);
+$('#stars').attr('data-rateit-value', starRating);
+</script>
+% endif
<script>
$("#airdate").html(moment($("#airdate").text()).format('MMM DD, YYYY'));
$("#runtime").html(millisecondsToMinutes($("#runtime").text(), true));
$('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 });
</script>
+% elif query:
+<script>
+$.ajax({
+url: 'get_search_results_children',
+type: "GET",
+async: true,
+data: {'query': "${query['query_string']}",
+'media_type': "${query['media_type']}",
+'season_index': "${query['parent_media_index']}"
+},
+complete: function(xhr, status) {
+$("#search-results-list").html(xhr.responseText); }
+});
+$(document).on('click', '#search-results-list a', function (e) {
+e.preventDefault();
+var new_rating_key = $(this).attr("id");
+var new_href = $(this).attr("href");
+
+$('#new_title').html($(this).find('.item-children-instance-text-wrapper').html());
+
+$('#confirm-modal').modal();
+$('#confirm-modal').one('click', '#confirm-update', function () {
+$(this).prop('disabled', true);
+var msg = "<i class='fa fa-refresh fa-spin'></i> Updating database..."
+showMsg(msg, false, false, 0)
+
+$.ajax({
+url: 'update_history_rating_key',
+data: { old_rating_key: "${query['rating_key']}",
+new_rating_key: new_rating_key,
+media_type: "${query['media_type']}"
+},
+async: true,
+success: function (data) {
+window.location.href = new_href;
+}
+});
+});
+});
+</script>
% endif
</%def>
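For reference, the endpoint call above reduced to a standalone sketch; the rating key values are placeholders and not taken from this commit:

    // Illustrative direct call to the update_history_rating_key endpoint used above (placeholder keys).
    $.ajax({
        url: 'update_history_rating_key',
        data: { old_rating_key: '12345', new_rating_key: '67890', media_type: 'show' },
        async: true,
        success: function (data) {
            // The template above redirects to the new item's info page on success.
        }
    });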
@@ -63,14 +63,14 @@ DOCUMENTATION :: END
</div>
</div>
<div class="item-children-instance-text-wrapper episode-item">
-<h3>${child['title']}</h3>
+<h3 title="${child['title']}">${child['title']}</h3>
</div>
% elif data['children_type'] == 'album':
<div class="item-children-poster">
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300);"></div>
</div>
<div class="item-children-instance-text-wrapper album-item">
-<h3>${child['title']}</h3>
+<h3 title="${child['title']}">${child['title']}</h3>
</div>
% elif data['children_type'] == 'track':
% if loop.index % 2 == 0:
data/interfaces/default/info_search_results_list.html (new file, 224 lines)
@@ -0,0 +1,224 @@
+<%doc>
+USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE
+
+For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/
+
+Filename: info_children_list.html
+Version: 0.1
+Variable names: data [list]
+
+data :: Usable parameters
+
+== Global keys ==
+results_count           Returns the number of search results.
+results_list            Returns a dictionary of search result types.
+
+data['results_list'] :: Usable paramaters
+
+== media_type keys ==
+movie                   Returns an array of movie results
+show                    Returns an array of show results
+season                  Returns an array of season results
+episode                 Returns an array of episode results
+artist                  Returns an array of artist results
+album                   Returns an array of album results
+track                   Returns an array of track results
+
+data['results_list'][media_type] :: Usable paramaters
+
+== Global keys ==
+rating_key              Returns the unique identifier for the media item.
+type                    Returns the type of media. Either 'movie', 'show', 'season', 'episode', 'artist', 'album', or 'track'.
+art                     Returns the location of the item's artwork
+title                   Returns the name of the movie, show, episode, artist, album, or track.
+duration                Returns the standard runtime of the media.
+content_rating          Returns the age rating for the media.
+summary                 Returns a brief description of the media plot.
+grandparent_title       Returns the name of the show, or artist.
+parent_index            Returns the index number of the season.
+index                   Returns the index number of the episode, or track.
+parent_thumb            Returns the location of the item's thumbnail. Use with pms_image_proxy.
+writers                 Returns an array of writers.
+thumb                   Returns the location of the item's thumbnail. Use with pms_image_proxy.
+parent_title            Returns the name of the show, or artist.
+rating                  Returns the 5 star rating value for the movie. Between 1 and 5.
+year                    Returns the release year of the movie, or show.
+genres                  Returns an array of genres.
+actors                  Returns an array of actors.
+directors               Returns an array of directors.
+studio                  Returns the name of the studio.
+originally_available_at Returns the air date of the item.
+
+DOCUMENTATION :: END
+</%doc>
+
+% if data != None:
+% if data['results_count'] > 0:
+% if 'movie' in data['results_list'] and data['results_list']['movie']:
+<div class="item-children-wrapper">
+<div class="item-children-section-title">
+<h4>Movies</h4>
+</div>
+<ul class="item-children-instance list-unstyled">
+% for child in data['results_list']['movie']:
+<li>
+<a href="info?item_id=${child['rating_key']}" id="${child['rating_key']}">
+<div class="item-children-poster">
+<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450);"></div>
+</div>
+<div class="item-children-instance-text-wrapper season-item">
+<h3 title="${child['title']}">${child['title']}</h3>
+<h3 class="text-muted">${child['year']}</h3>
+</div>
+</a>
+</li>
+% endfor
+</ul>
+</div>
+% endif
+% if 'show' in data['results_list'] and data['results_list']['show']:
+<div class="item-children-wrapper">
+<div class="item-children-section-title">
+<h4>TV Shows</h4>
+</div>
+<ul class="item-children-instance list-unstyled">
+% for child in data['results_list']['show']:
+<li>
+<a href="info?item_id=${child['rating_key']}" id="${child['rating_key']}">
+<div class="item-children-poster">
+<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450);"></div>
+</div>
+<div class="item-children-instance-text-wrapper season-item">
+<h3 title="${child['title']}">${child['title']}</h3>
+<h3 class="text-muted">${child['year']}</h3>
+</div>
+</a>
+</li>
+% endfor
+</ul>
+</div>
+% endif
+% if 'season' in data['results_list'] and data['results_list']['season']:
+<div class="item-children-wrapper">
+<div class="item-children-section-title">
+<h4>Seasons</h4>
+</div>
+<ul class="item-children-instance list-unstyled">
+% for child in data['results_list']['season']:
+<li>
+<a href="info?item_id=${child['rating_key']}" id="${child['rating_key']}">
+<div class="item-children-poster">
+<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450);"></div>
+</div>
+<div class="item-children-instance-text-wrapper season-item">
+<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
+<h3 class="text-muted">S${child['index']}</h3>
+</div>
+</a>
+</li>
+% endfor
+</ul>
+</div>
+% endif
+% if 'episode' in data['results_list'] and data['results_list']['episode']:
+<div class="item-children-wrapper">
+<div class="item-children-section-title">
+<h4>Episodes</h4>
+</div>
+<ul class="item-children-instance list-unstyled">
+% for child in data['results_list']['episode']:
+<li>
+<a href="info?item_id=${child['rating_key']}" id="${child['rating_key']}">
+<div class="item-children-poster">
+<div class="item-children-poster-face episode-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450);"></div>
+</div>
+<div class="item-children-instance-text-wrapper episode-item">
+<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
+<h3 title="${child['title']}">${child['title']}</h3>
+<h3 class="text-muted">S${child['parent_index']} · E${child['index']}</h3>
+</div>
+</a>
+</li>
+% endfor
+</ul>
+</div>
+% endif
+% if 'artist' in data['results_list'] and data['results_list']['artist']:
+<div class="item-children-wrapper">
+<div class="item-children-section-title">
+<h4>Artists</h4>
+</div>
+<ul class="item-children-instance list-unstyled">
+% for child in data['results_list']['artist']:
+<li>
+<a href="info?item_id=${child['rating_key']}" id="${child['rating_key']}">
+<div class="item-children-poster">
+<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300);"></div>
+</div>
+<div class="item-children-instance-text-wrapper album-item">
+<h3 title="${child['title']}">${child['title']}</h3>
+</div>
+</a>
+</li>
+% endfor
+</ul>
+</div>
+% endif
+% if 'album' in data['results_list'] and data['results_list']['album']:
+<div class="item-children-wrapper">
+<div class="item-children-section-title">
+<h4>Albums</h4>
+</div>
+<ul class="item-children-instance list-unstyled">
+% for child in data['results_list']['album']:
+<li>
+<a href="info?item_id=${child['rating_key']}" id="${child['rating_key']}">
+<div class="item-children-poster">
+<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300);"></div>
+</div>
+<div class="item-children-instance-text-wrapper album-item">
+<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
+<h3 title="${child['title']}">${child['title']}</h3>
+</div>
+</a>
+</li>
+% endfor
+</ul>
+</div>
+% endif
+% if 'track' in data['results_list'] and data['results_list']['track']:
+<div class="item-children-wrapper">
+<div class="item-children-section-title">
+<h4>Tracks</h4>
+</div>
+<ul class="item-children-instance list-unstyled">
+% for child in data['results_list']['track']:
+<li>
+<a href="info?item_id=${child['rating_key']}" id="${child['rating_key']}">
+<div class="item-children-poster">
+<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['parent_thumb']}&width=300&height=300);">
+<div class="item-children-card-overlay">
+<div class="item-children-overlay-text">
+Track ${child['index']}
+</div>
+</div>
+</div>
+</div>
+<div class="item-children-instance-text-wrapper album-item">
+<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
+<h3 title="${child['title']}">${child['title']}</h3>
+<h3 title="${child['parent_title']}" class="text-muted">${child['parent_title']}</h3>
+</div>
+</a>
+</li>
+% endfor
+</ul>
+</div>
+% endif
+% else:
+<div class="item-children-wrapper">
+No search results found.
+</div>
+% endif
+% endif
@@ -213,11 +213,11 @@ function getPlatformImagePath(platformName) {
return 'interfaces/default/images/platforms/opera.png';
} else if (platformName.indexOf("KODI") > -1) {
return 'interfaces/default/images/platforms/kodi.png';
-} else if (platformName.indexOf("Mystery 3") > -1) {
+} else if (platformName.indexOf("Playstation 3") > -1) {
return 'interfaces/default/images/platforms/playstation.png';
-} else if (platformName.indexOf("Mystery 4") > -1) {
+} else if (platformName.indexOf("Playstation 4") > -1) {
return 'interfaces/default/images/platforms/playstation.png';
-} else if (platformName.indexOf("Mystery 5") > -1) {
+} else if (platformName.indexOf("Xbox 360") > -1) {
return 'interfaces/default/images/platforms/xbox.png';
} else if (platformName.indexOf("Windows") > -1) {
return 'interfaces/default/images/platforms/win8.png';
@@ -46,13 +46,18 @@ history_table_options = {
"createdCell": function (td, cellData, rowData, row, col) {
if (rowData['stopped'] === null) {
$(td).html('Currently watching...');
+} else if (rowData['group_count'] > 1) {
+date = moment(cellData, "X").format(date_format);
+expand_history = '<span class="expand-history-tooltip" data-toggle="tooltip" title="Show Detailed History"><i class="fa fa-plus-circle fa-fw"></i></span>';
+$(td).html('<div><a href="#"><div style="float: left;">' + expand_history + ' ' + date + '</div></a></div>');
} else {
-$(td).html(moment(cellData,"X").format(date_format));
+date = moment(cellData, "X").format(date_format);
+$(td).html('<div style="float: left;"><i class="fa fa-fw"></i> ' + date + '</div>');
}
},
"searchable": false,
"width": "8%",
-"className": "no-wrap"
+"className": "no-wrap expand-history"
},
{
"targets": [2],
@@ -83,7 +88,8 @@ history_table_options = {
$(td).html('n/a');
}
} else {
-$(td).html('<a href="javascript:void(0)" data-toggle="modal" data-target="#ip-info-modal"><i class="fa fa-map-marker"></i> ' + cellData + '</a>');
+external_ip = '<span class="external-ip-tooltip" data-toggle="tooltip" title="External IP"><i class="fa fa-map-marker fa-fw"></i></span>';
+$(td).html('<a href="javascript:void(0)" data-toggle="modal" data-target="#ip-info-modal">'+ external_ip + cellData + '</a>');
}
} else {
$(td).html('n/a');
@@ -98,14 +104,14 @@ history_table_options = {
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
var transcode_dec = '';
-if (rowData['video_decision'] === 'transcode') {
+if (rowData['video_decision'] === 'transcode' || rowData['audio_decision'] === 'transcode') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
-} else if (rowData['video_decision'] === 'copy') {
+} else if (rowData['video_decision'] === 'copy' || rowData['audio_decision'] === 'copy') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
-} else if (rowData['video_decision'] === 'direct play' || rowData['video_decision'] === '') {
+} else if (rowData['video_decision'] === 'direct play' || rowData['audio_decision'] === 'direct play') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
}
$(td).html('<div><a href="#" data-target="#info-modal" data-toggle="modal"><div style="float: left;">' + transcode_dec + ' ' + cellData + '</div></a></div>');
}
},
"width": "15%",
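The new conditions above (this change also fixes the transcode decision shown for music tracks) can be read as a precedence rule; an illustrative restatement, not part of the commit:

    // Illustrative precedence for the decision icon: transcode beats copy (direct stream), which beats direct play,
    // considering both the video and audio decisions.
    function streamDecision(videoDecision, audioDecision) {
        if (videoDecision === 'transcode' || audioDecision === 'transcode') { return 'transcode'; }
        if (videoDecision === 'copy' || audioDecision === 'copy') { return 'direct stream'; }
        return 'direct play';
    }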
@@ -121,16 +127,16 @@ history_table_options = {
if (rowData['media_type'] === 'movie') {
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120">' + cellData + ' (' + rowData['year'] + ')</span>'
$(td).html('<div class="history-title"><a href="info?source=history&item_id=' + rowData['id'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'episode') {
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120">' + cellData + ' \
(S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')</span>'
$(td).html('<div class="history-title"><a href="info?source=history&item_id=' + rowData['id'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
} else if (rowData['media_type'] === 'track') {
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=poster" data-height="80">' + cellData + ' (' + rowData['parent_title'] + ')</span>'
$(td).html('<div class="history-title"><a href="info?source=history&item_id=' + rowData['id'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
} else {
$(td).html('<a href="info?item_id=' + rowData['id'] + '">' + cellData + '</a>');
}
@@ -155,7 +161,7 @@ history_table_options = {
{
"targets": [7],
"data":"paused_counter",
-"render": function ( data, type, full ) {
+"render": function (data, type, full) {
if (data !== null) {
return Math.round(moment.duration(data, 'seconds').as('minutes')) + ' mins';
} else {
@@ -183,7 +189,7 @@ history_table_options = {
{
"targets": [9],
"data":"duration",
-"render": function ( data, type, full ) {
+"render": function (data, type, full) {
if (data !== null) {
return Math.round(moment.duration(data, 'seconds').as('minutes')) + ' mins';
} else {
@@ -196,11 +202,11 @@ history_table_options = {
},
{
"targets": [10],
-"data":"percent_complete",
+"data": "watched_status",
-"render": function ( data, type, full ) {
+"render": function (data, type, full) {
-if (data > 80) {
+if (data == 1) {
return '<span class="watched-tooltip" data-toggle="tooltip" title="Watched"><i class="fa fa-lg fa-circle"></i></span>'
-} else if (data > 40) {
+} else if (data == 0.5) {
return '<span class="watched-tooltip" data-toggle="tooltip" title="Partial"><i class="fa fa-lg fa-adjust fa-rotate-180"></i></span>'
} else {
return '<span class="watched-tooltip" data-toggle="tooltip" title="Unwatched"><i class="fa fa-lg fa-circle-o"></i></span>'
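The column now reads a precomputed watched_status (0, 0.5, 1) instead of thresholding percent complete client-side. A sketch of the equivalent mapping, reusing the thresholds the old code applied; the actual server-side implementation is not shown in this diff:

    // Assumed mapping from percent complete to watched_status, mirroring the previous >80 / >40 thresholds.
    function toWatchedStatus(percentComplete) {
        if (percentComplete > 80) { return 1; }    // watched
        if (percentComplete > 40) { return 0.5; }  // partially watched
        return 0;                                  // unwatched
    }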
@@ -218,6 +224,8 @@ history_table_options = {
$('#ajaxMsg').fadeOut();

// Create the tooltips.
+$('.expand-history-tooltip').tooltip({ container: 'body' });
+$('.external-ip-tooltip').tooltip();
$('.transcode-tooltip').tooltip();
$('.media-type-tooltip').tooltip();
$('.watched-tooltip').tooltip();
@@ -231,24 +239,57 @@ history_table_options = {
});

if ($('#row-edit-mode').hasClass('active')) {
-$('.delete-control').each(function() {
+$('.delete-control').each(function () {
$(this).removeClass('hidden');
});
}

+history_table.rows().every(function () {
+var rowData = this.data();
+if (rowData['group_count'] != 1 && rowData['reference_id'] in history_child_table) {
+// if grouped row and a child table was already created
+$(this.node()).find('i.fa').toggleClass('fa-plus-circle').toggleClass('fa-minus-circle');
+this.child(childTableFormat(rowData)).show();
+createChildTable(this, rowData)
+}
+});
},
"preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
showMsg(msg, false, false, 0)
},
-"rowCallback": function (row, rowData) {
+"rowCallback": function (row, rowData, rowIndex) {
-if ($.inArray(rowData['id'], history_to_delete) !== -1) {
+if (rowData['group_count'] == 1) {
+// if no grouped rows simply toggle the delete button
+if ($.inArray(rowData['id'], history_to_delete) !== -1) {
+$(row).find('button[data-id="' + rowData['id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
+}
+} else {
+// if grouped rows
+// toggle the parent button to danger
$(row).find('button[data-id="' + rowData['id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
+// check if any child rows are not selected
+var group_ids = rowData['group_ids'].split(',').map(Number);
+group_ids.forEach(function (id) {
+var index = $.inArray(id, history_to_delete);
+if (index == -1) {
+$(row).find('button[data-id="' + rowData['id'] + '"]').addClass('btn-warning').removeClass('btn-danger');
+}
+});
}

+if (rowData['group_count'] != 1 && rowData['reference_id'] in history_child_table) {
+// if grouped row and a child table was already created
+$(row).addClass('shown')
+history_table.row(row).child(childTableFormat(rowData)).show();
+}

}
}

-$('#history_table').on('click', 'td.modal-control', function () {
-var tr = $(this).parents('tr');
+// Parent table platform modal
+$('#history_table').on('click', '> tbody > tr > td.modal-control', function () {
+var tr = $(this).closest('tr');
var row = history_table.row( tr );
var rowData = row.data();
@@ -266,8 +307,9 @@ $('#history_table').on('click', 'td.modal-control', function () {
showStreamDetails();
});

-$('#history_table').on('click', 'td.modal-control-ip', function () {
-var tr = $(this).parents('tr');
+// Parent table ip address modal
+$('#history_table').on('click', '> tbody > tr > td.modal-control-ip', function () {
+var tr = $(this).closest('tr');
var row = history_table.row( tr );
var rowData = row.data();
@@ -288,16 +330,238 @@ $('#history_table').on('click', 'td.modal-control-ip', function () {
getUserLocation(rowData['ip_address']);
});

-$('#history_table').on('click', 'td.delete-control > button', function () {
-var tr = $(this).parents('tr');
+// Parent table delete mode
+$('#history_table').on('click', '> tbody > tr > td.delete-control > button', function () {
+var tr = $(this).closest('tr');
var row = history_table.row( tr );
var rowData = row.data();

-var index = $.inArray(rowData['id'], history_to_delete);
-if (index === -1) {
-history_to_delete.push(rowData['id']);
+if (rowData['group_count'] == 1) {
+// if no grouped rows simply add or remove row from history_to_delete
+var index = $.inArray(rowData['id'], history_to_delete);
+if (index === -1) {
+history_to_delete.push(rowData['id']);
+} else {
+history_to_delete.splice(index, 1);
+}
+$(this).toggleClass('btn-warning').toggleClass('btn-danger');
} else {
-history_to_delete.splice(index, 1);
+// if grouped rows
+if ($(this).hasClass('btn-warning')) {
+// add all grouped rows to history_to_delete
+var group_ids = rowData['group_ids'].split(',').map(Number);
+group_ids.forEach(function (id) {
+var index = $.inArray(id, history_to_delete);
+if (index == -1) {
+history_to_delete.push(id);
+}
+});
+$(this).toggleClass('btn-warning').toggleClass('btn-danger');
+if (row.child.isShown()) {
+// if child table is visible, toggle all child buttons to danger
+tr.next().find('td.delete-control > button.btn-warning').toggleClass('btn-warning').toggleClass('btn-danger');
+}
+} else {
+// remove all grouped rows to history_to_delete
+var group_ids = rowData['group_ids'].split(',').map(Number);
+group_ids.forEach(function (id) {
+var index = $.inArray(id, history_to_delete);
+if (index != -1) {
+history_to_delete.splice(index, 1);
+}
+});
+$(this).toggleClass('btn-warning').toggleClass('btn-danger');
+if (row.child.isShown()) {
+// if child table is visible, toggle all child buttons to warning
+tr.next().find('td.delete-control > button.btn-danger').toggleClass('btn-warning').toggleClass('btn-danger');
+}
+}
}
-$(this).toggleClass('btn-warning').toggleClass('btn-danger');
-});
+});
+
+// Parent table expand detailed history
+$('#history_table').on('click', '> tbody > tr > td.expand-history a', function () {
+var tr = $(this).closest('tr');
+var row = history_table.row(tr);
+var rowData = row.data();
+
+$(this).find('i.fa').toggleClass('fa-plus-circle').toggleClass('fa-minus-circle');
+
+if (row.child.isShown()) {
+$('div.slider', row.child()).slideUp(function () {
+row.child.hide();
+tr.removeClass('shown');
+delete history_child_table[rowData['reference_id']];
+});
+} else {
+tr.addClass('shown');
+row.child(childTableFormat(rowData)).show();
+createChildTable(row, rowData);
+}
+});
+
+// Initialize the detailed history child table options using the parent table options
+function childTableOptions(rowData) {
+history_child_options = history_table_options;
+// Remove settings that are not necessary
+history_child_options.searching = false;
+history_child_options.lengthChange = false;
+history_child_options.info = false;
+history_child_options.pageLength = 10;
+history_child_options.bStateSave = false;
+history_child_options.ajax = {
+"url": "get_history",
+type: "post",
+data: function (d) {
+return {
+'json_data': JSON.stringify(d),
+'grouping': false,
+'reference_id': rowData['reference_id']
+};
+}
+}
+history_child_options.fnDrawCallback = function (settings) {
+$('#ajaxMsg').fadeOut();
+
+// Create the tooltips.
+$('.expand-history-tooltip').tooltip({ container: 'body' });
+$('.external-ip-tooltip').tooltip();
+$('.transcode-tooltip').tooltip();
+$('.media-type-tooltip').tooltip();
+$('.watched-tooltip').tooltip();
+$('.thumb-tooltip').popover({
+html: true,
+trigger: 'hover',
+placement: 'right',
+content: function () {
+return '<div class="history-thumbnail" style="background-image: url(' + $(this).data('img') + '); height: ' + $(this).data('height') + 'px;" />';
+}
+});
+
+if ($('#row-edit-mode').hasClass('active')) {
+$('.delete-control').each(function () {
+$(this).removeClass('hidden');
+});
+}
+
+$(this).closest('div.slider').slideDown();
+}
+
+return history_child_options;
+}
+
+// Format the detailed history child table
+function childTableFormat(rowData) {
+return '<div class="slider">' +
+'<table id="history_child-' + rowData['reference_id'] + '">' +
+'<thead>' +
+'<tr>' +
+'<th align="left" id="delete_row">Delete</th>' +
+'<th align="left" id="time">Time</th>' +
+'<th align="left" id="friendly_name">User</th>' +
+'<th align="left" id="ip_address">IP Address</th>' +
+'<th align="left" id="platform">Platform</th>' +
+'<th align="left" id="title">Title</th>' +
+'<th align="left" id="started">Started</th>' +
+'<th align="left" id="paused_counter">Paused</th>' +
+'<th align="left" id="stopped">Stopped</th>' +
+'<th align="left" id="duration">Duration</th>' +
+'<th align="left" id="percent_complete"></th>' +
+'</tr>' +
+'</thead>' +
+'<tbody>' +
+'</tbody>' +
+'</table>' +
+'</div>';
+}
+
+// Create the detailed history child table
+history_child_table = {};
+function createChildTable(row, rowData) {
+history_child_options = childTableOptions(rowData);
+// initialize the child table
+history_child_table[rowData['reference_id']] = $('#history_child-' + rowData['reference_id']).DataTable(history_child_options);
+
+// Set child table column visibility to match parent table
+var visibility = history_table.columns().visible();
+for (var i = 0; i < visibility.length; i++) {
+if (!(visibility[i])) { history_child_table[rowData['reference_id']].column(i).visible(visibility[i]); }
+}
+history_table.on('column-visibility', function (e, settings, colIdx, visibility) {
+if (row.child.isShown()) {
+history_child_table[rowData['reference_id']].column(colIdx).visible(visibility);
+}
+});
+
+// Child table platform modal
+$('#history_child-' + rowData['reference_id']).on('click', 'td.modal-control', function () {
+var tr = $(this).closest('tr');
+var childRow = history_child_table[rowData['reference_id']].row(tr);
+var childRowData = childRow.data();
+
+function showStreamDetails() {
+$.ajax({
+url: 'get_stream_data',
+data: { row_id: childRowData['id'], user: childRowData['friendly_name'] },
+cache: false,
+async: true,
+complete: function (xhr, status) {
+$("#info-modal").html(xhr.responseText);
+}
+});
+}
+showStreamDetails();
+});
+
+// Child table ip address modal
+$('#history_child-' + rowData['reference_id']).on('click', 'td.modal-control-ip', function () {
+var tr = $(this).closest('tr');
+var childRow = history_child_table[rowData['reference_id']].row(tr);
+var childRowData = childRow.data();
+
+function getUserLocation(ip_address) {
+if (isPrivateIP(ip_address)) {
+return "n/a"
+} else {
+$.ajax({
+url: 'get_ip_address_details',
+data: { ip_address: ip_address },
+async: true,
+complete: function (xhr, status) {
+$("#ip-info-modal").html(xhr.responseText);
+}
+});
+}
+}
+getUserLocation(childRowData['ip_address']);
+});
+
+// Child table delete mode
+$('#history_child-' + rowData['reference_id']).on('click', 'td.delete-control > button', function () {
+var tr = $(this).closest('tr');
+var childRow = history_child_table[rowData['reference_id']].row(tr);
+var childRowData = childRow.data();
+
+// add or remove row from history_to_delete
+var index = $.inArray(childRowData['id'], history_to_delete);
+if (index === -1) {
+history_to_delete.push(childRowData['id']);
+} else {
+history_to_delete.splice(index, 1);
+}
+$(this).toggleClass('btn-warning').toggleClass('btn-danger');
+
+tr.parents('tr').prev().find('td.delete-control > button.btn-warning').toggleClass('btn-warning').toggleClass('btn-danger');
+// check if any child rows are not selected
+var group_ids = rowData['group_ids'].split(',').map(Number);
+group_ids.forEach(function (id) {
+var index = $.inArray(id, history_to_delete);
+if (index == -1) {
+// if any child row is not selected, toggle parent button to warning
+tr.parents('tr').prev().find('td.delete-control > button.btn-danger').addClass('btn-warning').removeClass('btn-danger');
+}
+});
+});
+}
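For orientation, each detailed-history child table above requests its rows with a call of roughly this shape; the values are placeholders and json_data is whatever DataTables serializes for the draw:

    // Sketch of the child table's ungrouped history request; reference_id is a placeholder.
    $.ajax({
        url: 'get_history',
        type: 'post',
        data: {
            json_data: JSON.stringify({ start: 0, length: 10 }),  // illustrative DataTables draw parameters
            grouping: false,                                      // child rows are always ungrouped
            reference_id: 1234                                    // grouped parent row being expanded
        },
        complete: function (xhr, status) {
            // The response feeds the history_child DataTable created above.
        }
    });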
@@ -77,11 +77,11 @@ history_table_modal_options = {
"createdCell": function (td, cellData, rowData, row, col) {
if (cellData !== '') {
var transcode_dec = '';
-if (rowData['video_decision'] === 'transcode') {
+if (rowData['video_decision'] === 'transcode' || rowData['audio_decision'] === 'transcode') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
-} else if (rowData['video_decision'] === 'copy') {
+} else if (rowData['video_decision'] === 'copy' || rowData['audio_decision'] === 'copy') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
-} else if (rowData['video_decision'] === 'direct play' || rowData['video_decision'] === '') {
+} else if (rowData['video_decision'] === 'direct play' || rowData['audio_decision'] === 'direct play') {
transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
}
$(td).html('<div><a href="#" data-target="#info-modal" data-toggle="modal"><div style="float: left;">' + transcode_dec + ' ' + cellData + '</div></a></div>');
@@ -126,6 +126,7 @@ history_table_modal_options = {
$('.media-type-tooltip').tooltip();
$('.thumb-tooltip').popover({
html: true,
+container: '#history-modal',
trigger: 'hover',
placement: 'right',
content: function () {
@@ -134,7 +135,7 @@ history_table_modal_options = {
});
},
"preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
showMsg(msg, false, false, 0)
}
}
@@ -35,7 +35,7 @@ var log_table_options = {
$('#ajaxMsg').fadeOut();
},
"preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
showMsg(msg, false, false, 0)
}
}
@@ -123,7 +123,7 @@ user_ip_table_options = {

},
"preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
showMsg(msg, false, false, 0)
}
}
@@ -187,7 +187,7 @@ users_list_table_options = {
}
},
"preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
showMsg(msg, false, false, 0)
},
"rowCallback": function (row, rowData) {
@@ -12,22 +12,30 @@ from plexpy import helpers
<div class="container-fluid">
<div class="row">
<form action="set_notification_config" method="post" class="form" id="set_notification_config" data-parsley-validate>
-<div class="col-md-8">
+<div class="col-md-12">
% for item in data:
% if item['input_type'] == 'text' or item['input_type'] == 'number' or item['input_type'] == 'password':
<div class="form-group">
<label for="${item['name']}">${item['label']}</label>
-<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
-% if item['name'] == 'osx_notify_app':
-<a href="javascript:void(0)" id="osxnotifyregister">Register</a>
-% endif
+<div class="row">
+<div class="col-md-8">
+<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
+% if item['name'] == 'osx_notify_app':
+<a href="javascript:void(0)" id="osxnotifyregister">Register</a>
+% endif
+</div>
+</div>
<p class="help-block">${item['description']}</p>
</div>
% elif item['input_type'] == 'button':
<div class="form-group">
-<input type="${item['input_type']}" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
+<div class="row">
+<div class="col-md-8">
+<input type="${item['input_type']}" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
+</div>
+</div>
+<p class="help-block">${item['description']}</p>
</div>
-<p class="help-block">${item['description']}</p>
% elif item['input_type'] == 'checkbox':
<div class="checkbox">
<label>
@@ -44,10 +44,10 @@ DOCUMENTATION :: END
            </div>
            <div class="dashboard-recent-media-metacontainer">
                % if item['type'] == 'season':
-               <h3>${item['parent_title']}</h3>
+               <h3 title="${item['parent_title']}">${item['parent_title']}</h3>
                <h3 class="text-muted">${item['title']}</h3>
                % elif item['type'] == 'movie':
-               <h3>${item['title']}</h3>
+               <h3 title="${item['title']}">${item['title']}</h3>
                <h3 class="text-muted">${item['year']}</h3>
                % endif
            </div>

@@ -66,7 +66,7 @@ DOCUMENTATION :: END
            </div>
        </div>
        <div class="dashboard-recent-media-metacontainer">
-           <h3>${item['parent_title']}</h3>
+           <h3 title="${item['parent_title']}">${item['parent_title']}</h3>
            <h3 class="text-muted">${item['title']}</h3>
        </div>
    </a>
41  data/interfaces/default/search.html  Normal file
@@ -0,0 +1,41 @@
<%inherit file="base.html"/>

<%def name="headIncludes()">
</%def>

<%def name="headerIncludes()">
</%def>

<%def name="body()">
<div class='container-fluid'>
    <div class='table-card-header'>
        <div class="header-bar">
            <span><i class="fa fa-search"></i> Search Results
            % if query:
            for <strong>${query}</strong>
            % endif
            </span>
        </div>
    </div>
    <div class='table-card-back'>
        <div id="search-results-list"><i class="fa fa-refresh fa-spin"></i> Loading search results...</div>
    </div>
</div>
</%def>

<%def name="javascriptIncludes()">
<script>
    $('#search_button').removeClass('btn-inactive');
    $('#search_query').val('${query}').css({ right: '0', width: '250px' }).addClass('active');

    $.ajax({
        url: 'get_search_results_children',
        type: "GET",
        async: true,
        data: {'query': "${query}"},
        complete: function (xhr, status) {
            $("#search-results-list").html(xhr.responseText);
        }
    });
</script>
</%def>
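The new search template defers its results to the get_search_results_children endpoint and simply injects the returned HTML fragment into the page. As a quick way to exercise that endpoint outside the UI, here is a minimal sketch; the host, port (PlexPy's usual default of 8181) and the absence of authentication are assumptions, and only the endpoint name and query parameter come from the template above.

    # Hypothetical smoke test for the new search endpoint; host/port are assumed.
    import requests  # third-party HTTP client, not bundled with PlexPy

    BASE_URL = "http://localhost:8181"  # adjust to your PlexPy address

    def fetch_search_fragment(query):
        # Mirrors the template's $.ajax call: GET with a 'query' parameter;
        # the response body is an HTML fragment dropped into #search-results-list.
        resp = requests.get(BASE_URL + "/get_search_results_children",
                            params={"query": query}, timeout=10)
        resp.raise_for_status()
        return resp.text

    if __name__ == "__main__":
        print(fetch_search_fragment("game of thrones")[:200])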
@@ -31,7 +31,7 @@ available_notification_agents = notifiers.available_notification_agents()
    </div>
    <div class="row">
        <!-- Nav tabs -->
-       <div class="col-md-4">
+       <div class="col-md-3">
            <ul class="nav-settings list-unstyled" role="tablist">
                <li role="presentation" class="active"><a href="#tabs-1" aria-controls="tabs-1" role="tab" data-toggle="tab">General</a></li>
                <li role="presentation"><a href="#tabs-2" aria-controls="tabs-2" role="tab" data-toggle="tab">Homepage Statistics</a></li>

@@ -45,7 +45,7 @@ available_notification_agents = notifiers.available_notification_agents()
                <li role="presentation"><a href="#tabs-10" aria-controls="tabs-10" role="tab" data-toggle="tab">Notification Agents</a></li>
            </ul>
        </div>
-       <div class="col-md-8">
+       <div class="col-md-9">
            <form action="configUpdate" method="post" class="form" id="configUpdate" data-parsley-validate>
                <div class="tab-content">
                    <div role="tabpanel" class="tab-pane active" id="tabs-1">

@@ -84,6 +84,12 @@ available_notification_agents = notifiers.available_notification_agents()
                        </div>
                        <p class="help-block">Set your preferred time format. <a href="javascript:void(0)" data-target="#dateTimeOptionsModal" data-toggle="modal">Click here</a> to see the parameter list.</p>
                    </div>
+                   <div class="checkbox">
+                       <label>
+                           <input type="checkbox" id="group_history_tables" name="group_history_tables" value="1" ${config['group_history_tables']}> Group Table History
+                       </label>
+                       <p class="help-block">Group successive play history by the same user as a single entry in tables.</p>
+                   </div>
                    <p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
                </div>
                <div role="tabpanel" class="tab-pane" id="tabs-2">

@@ -133,7 +139,7 @@ available_notification_agents = notifiers.available_notification_agents()
                    </div>
                    <div class="checkbox">
                        <label>
-                           <input type="checkbox" id="home_stats_type" name="home_stats_type" value="1" ${config['home_stats_type']}> Use play duration
+                           <input type="checkbox" id="home_stats_type" name="home_stats_type" value="1" ${config['home_stats_type']}> Use Play Duration
                        </label>
                        <p class="help-block">Use play duration instead of play count to generate statistics.</p>
                    </div>

@@ -165,7 +171,7 @@ available_notification_agents = notifiers.available_notification_agents()
                    <label for="http_host">HTTP Host</label>
                    <div class="row">
                        <div class="col-md-6">
-                           <input type="text" class="form-control" id="http_host" name="http_host" value="${config['http_host']}" data-parsley-trigger="change" required>
+                           <input type="text" class="form-control http-settings" id="http_host" name="http_host" value="${config['http_host']}" data-parsley-trigger="change" required>
                        </div>
                    </div>
                    <p class="help-block">e.g. localhost or an IP, such as 0.0.0.0</p>

@@ -174,7 +180,7 @@ available_notification_agents = notifiers.available_notification_agents()
                    <label for="http_port">HTTP Port</label>
                    <div class="row">
                        <div class="col-md-2">
-                           <input type="text" class="form-control" data-parsley-type="integer" id="http_port" name="http_port" value="${config['http_port']}" data-parsley-trigger="change" data-parsley-errors-container="#http_port_error" required>
+                           <input type="text" class="form-control http-settings" data-parsley-type="integer" id="http_port" name="http_port" value="${config['http_port']}" data-parsley-trigger="change" data-parsley-errors-container="#http_port_error" required>
                        </div>
                        <div id="http_port_error" class="alert alert-danger settings-alert" role="alert"></div>
                    </div>

@@ -188,18 +194,18 @@ available_notification_agents = notifiers.available_notification_agents()
                    </div>
                    <div class="checkbox">
                        <label>
-                           <input type="checkbox" name="enable_https" id="enable_https" value="1" ${config['enable_https']} /> Enable HTTPS
+                           <input type="checkbox" class="http-settings" name="enable_https" id="enable_https" value="1" ${config['enable_https']} /> Enable HTTPS
                        </label>
                        <p class="help-block">Enable HTTPS for web server for encrypted communication.</p>
                    </div>
                    <div id="https_options">
                        <div class="form-group">
                            <label for="https_cert">HTTPS Cert</label>
-                           <input type="text" class="form-control" id="https_cert" name="https_cert" value="${config['https_cert']}">
+                           <input type="text" class="form-control http-settings" id="https_cert" name="https_cert" value="${config['https_cert']}">
                        </div>
                        <div class="form-group">
                            <label for="https_key">HTTPS Key</label>
-                           <input type="text" class="form-control" id="https_key" name="https_key" value="${config['https_key']}">
+                           <input type="text" class="form-control http-settings" id="https_key" name="https_key" value="${config['https_key']}">
                        </div>
                    </div>

@@ -215,7 +221,7 @@ available_notification_agents = notifiers.available_notification_agents()
                    <label for="http_username">HTTP Username</label>
                    <div class="row">
                        <div class="col-md-4">
-                           <input type="text" class="form-control" id="http_username" name="http_username" value="${config['http_username']}" size="30">
+                           <input type="text" class="form-control auth-settings" id="http_username" name="http_username" value="${config['http_username']}" size="30">
                        </div>
                    </div>
                    <p class="help-block">Username for web server authentication. Leave empty to disable.</p>

@@ -224,7 +230,7 @@ available_notification_agents = notifiers.available_notification_agents()
                    <label for="http_password">HTTP Password</label>
                    <div class="row">
                        <div class="col-md-4">
-                           <input type="password" class="form-control" id="http_password" name="http_password" value="${config['http_password']}" size="30">
+                           <input type="password" class="form-control auth-settings" id="http_password" name="http_password" value="${config['http_password']}" size="30">
                        </div>
                    </div>
                    <p class="help-block">Password for web server authentication. Leave empty to disable.</p>

@@ -262,6 +268,7 @@ available_notification_agents = notifiers.available_notification_agents()
                    <div class="padded-header">
                        <h3>Plex Media Server</h3>
                    </div>
+                   <p class="help-block">If you're using websocket monitoring, any server changes require a restart of PlexPy.</p>
                    <div class="form-group has-feedback" id="pms-ip-group">
                        <label for="pms_ip">Plex IP or Hostname</label>
                        <div class="row">

@@ -305,7 +312,7 @@ available_notification_agents = notifiers.available_notification_agents()
                            <input type="text" class="form-control" id="pms_logs_folder" name="pms_logs_folder" value="${config['pms_logs_folder']}" size="30" data-parsley-trigger="change">
                        </div>
                    </div>
-                   <p class="help-block">Set the folder where your Plex Server logs are. This is required if you enable IP logging.<br /><a href="https://support.plex.tv/hc/en-us/articles/200250417-Plex-Media-Server-Log-Files" target="_blank">Click here</a> for help.</p>
+                   <p class="help-block">Set the complete folder path where your Plex Server logs are, shortcuts are not recognized.<br /><a href="https://support.plex.tv/hc/en-us/articles/200250417-Plex-Media-Server-Log-Files" target="_blank">Click here</a> for help. This is required if you enable IP logging.</p>
                </div>

                <input type="hidden" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">

@@ -338,7 +345,7 @@ available_notification_agents = notifiers.available_notification_agents()
                    <h3>Friends List</h3>
                </div>
                <div class="form-group">
-                   <label for="refresh_users_interval">User list Refresh Interval</label>
+                   <label for="refresh_users_interval">User List Refresh Interval</label>
                    <div class="row">
                        <div class="col-md-2">
                            <input type="text" class="form-control" data-parsley-type="integer" id="refresh_users_interval" name="refresh_users_interval" value="${config['refresh_users_interval']}" size="5" data-parsley-range="[1,24]" data-parsley-trigger="change" data-parsley-errors-container="#refresh_users_interval_error" required>

@@ -349,7 +356,7 @@ available_notification_agents = notifiers.available_notification_agents()
                </div>
                <div class="checkbox">
                    <label>
-                       <input type="checkbox" id="refresh_users_on_startup" name="refresh_users_on_startup" value="1" ${config['refresh_users_on_startup']}> Refresh user list on startup
+                       <input type="checkbox" id="refresh_users_on_startup" name="refresh_users_on_startup" value="1" ${config['refresh_users_on_startup']}> Refresh User List on Startup
                    </label>
                    <p class="help-block">Refresh the user list when PlexPy starts.</p>
                </div>

@@ -363,7 +370,7 @@ available_notification_agents = notifiers.available_notification_agents()

                <div class="checkbox">
                    <label>
-                       <input type="checkbox" id="pms_use_bif" name="pms_use_bif" value="1" ${config['pms_use_bif']}> Use video preview thumbnails (BIF)
+                       <input type="checkbox" id="pms_use_bif" name="pms_use_bif" value="1" ${config['pms_use_bif']}> Use Video Preview Thumbnails (BIF)
                    </label>
                    <p class="help-block">If you have media indexing enabled on your server, use these on the activity pane.</p>
                </div>

@@ -390,6 +397,12 @@ available_notification_agents = notifiers.available_notification_agents()
                    </div>
                    <p class="help-block">The interval (in seconds) PlexPy will ping your Plex Server. Min 30 seconds, recommended 60 seconds.</p>
                </div>
+               <div class="checkbox">
+                   <label>
+                       <input type="checkbox" class="monitor-settings" id="monitoring_use_websocket" name="monitoring_use_websocket" value="1" ${config['monitoring_use_websocket']}> Use Websocket (requires restart)
+                   </label>
+                   <p class="help-block">Instead of polling the server at regular intervals let the server tell us when something happens. This is currently experimental. Encrypted websocket is not currently supported.</p>
+               </div>

                <div class="padded-header">
                    <h3>History Logging</h3>

@@ -400,6 +413,12 @@ available_notification_agents = notifiers.available_notification_agents()
                    </label>
                    <p class="help-block">Keep records of all video items played from your Plex Media Server.</p>
                </div>
+               <div class="checkbox">
+                   <label>
+                       <input type="checkbox" id="music_logging_enable" name="music_logging_enable" value="1" ${config['music_logging_enable']}> Log Music
+                   </label>
+                   <p class="help-block">Keep records of all audio items played from your Plex Media Server. VERY experimental.</p>
+               </div>
                <div class="form-group">
                    <label for="logging_ignore_interval">Ignore Interval</label>
                    <div class="row">

@@ -410,16 +429,6 @@ available_notification_agents = notifiers.available_notification_agents()
                    </div>
                    <p class="help-block">The interval (in seconds) an item must be in a playing state before logging it. 0 to disable.</p>
                </div>
-               <div class="checkbox">
-                   <label>
-                       <input type="checkbox" id="music_logging_enable" name="music_logging_enable" value="1" ${config['music_logging_enable']}> Log Music
-                   </label>
-                   <p class="help-block">Keep records of all audio items played from your Plex Media Server. VERY experimental.</p>
-               </div>
-
-               <div class="padded-header">
-                   <h3>IP Logging</h3>
-               </div>
                <div class="checkbox">
                    <label>
                        <input type="checkbox" id="ip_logging_enable" name="ip_logging_enable" value="1" ${config['ip_logging_enable']}> Enable IP Logging

@@ -487,6 +496,12 @@ available_notification_agents = notifiers.available_notification_agents()
                    </div>
                    <p class="help-block">Set the progress percentage of when a watched notification should be triggered. Minimum 50, Maximum 95.</p>
                </div>
+               <div class="checkbox">
+                   <label>
+                       <input type="checkbox" name="notify_consecutive" id="notify_consecutive" value="1" ${config['notify_consecutive']}> Allow Consecutive Notifications
+                   </label>
+                   <p class="help-block">Disable to prevent consecutive notifications (i.e. both watched & stopped notifications).</p>
+               </div>

                <div class="padded-header">
                    <h3>Custom Notification Messages</h3>

@@ -974,6 +989,22 @@ available_notification_agents = notifiers.available_notification_agents()
            </div>
        </div>
    </div>
+   <div id="restart-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="restart-modal">
+       <div class="modal-dialog" role="document">
+           <div class="modal-content">
+               <div class="modal-header">
+                   <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
+                   <h4 class="modal-title">Restart</h4>
+               </div>
+               <div class="modal-body">
+                   You have changed settings that require PlexPy to restart. Hit the restart button below to restart now.
+               </div>
+               <div class="modal-footer">
+                   <button id="modal_link_restart" class="btn btn-bright"><i class="fa fa-refresh"></i> Restart</button>
+               </div>
+           </div>
+       </div>
+   </div>
    </div>
</%def>

@@ -994,6 +1025,19 @@ $(document).ready(function() {
        window.location.hash = e.target.hash.replace("#", "#" + prefix);
    });

+   // Global Variables
+   serverChanged = false;
+   authChanged = false;
+   httpChanged = false;
+   monitorChanged = false;
+
+   // Alert the user that their changes require a restart.
+   function postSaveChecks() {
+       if ((serverChanged && $('#monitoring_use_websocket').is(":checked")) || authChanged || httpChanged || monitorChanged) {
+           $('#restart-modal').modal('show');
+       }
+   }
+
    var configForm = $("#configUpdate");
    $('.save-button').click(function() {
        if ($("#pms_identifier").val() == "") {

@@ -1001,6 +1045,7 @@ $(document).ready(function() {
        } else {
            if (configForm.parsley().validate()) {
                doAjaxCall('configUpdate',$(this),'tabs',true);
+               postSaveChecks();
                return false;
            } else {
                showMsg('<i class="fa fa-exclamation-circle"></i> Please verify your settings.',false,true,2000,true)

@@ -1027,6 +1072,10 @@ $(document).ready(function() {
        window.location.href = "checkGithub";
    });

+   $("#modal_link_restart").click(function() {
+       window.location.href = "restart";
+   });
+
    if ($("#api_enabled").is(":checked")) {
        $("#apioptions").show();
    } else {

@@ -1067,7 +1116,20 @@ $(document).ready(function() {
        }
    });

+   $( ".http-settings" ).change(function() {
+       httpChanged = true;
+   });
+
+   $( ".auth-settings" ).change(function() {
+       authChanged = true;
+   });
+
+   $( ".monitor-settings" ).change(function() {
+       monitorChanged = true;
+   });
+
    $( ".pms-settings" ).change(function() {
+       serverChanged = true;
        $("#pms_identifier").val("");
        $("#pms-verify-status").html("");
        verifyServer();

@@ -1129,11 +1191,11 @@ $(document).ready(function() {
                'Authorization': 'Basic ' + btoa($("#pms_username").val() + ':' + $("#pms_password").val())
            },
            error: function(jqXHR, textStatus, errorThrown) {
-               $("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Authentation failed!');
+               $("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Authentication failed!');
            },
            success: function (xml) {
                var authToken = $(xml).find('user').attr('authenticationToken');
-               $("#pms-token-status").html('<i class="fa fa-check"></i> Authentation successful!');
+               $("#pms-token-status").html('<i class="fa fa-check"></i> Authentication successful!');
                $("#pms_token").val(authToken);
                $('#pms-auth-modal').modal('hide');
            }
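A note on the new "Use Websocket" option in the hunks above: instead of hitting the Plex server on a timer, PlexPy keeps one connection open and lets the server push events, using the websocket-client library vendored further down in this commit. The sketch below only illustrates that idea; the endpoint path and port are assumptions here, and the real client lives in plexpy/web_socket.py, which is not part of this excerpt.

    # Illustration only; PlexPy's actual monitor lives in plexpy/web_socket.py.
    import json
    from websocket import create_connection  # bundled under lib/websocket below

    PMS_URL = "ws://127.0.0.1:32400/:/websockets/notifications"  # assumed address/path

    def listen(limit=5):
        ws = create_connection(PMS_URL, timeout=30)
        try:
            for _ in range(limit):
                message = ws.recv()             # blocks until the server pushes an event
                if message:
                    print(json.loads(message))  # payload shape depends on the PMS version
        finally:
            ws.close()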
@@ -48,11 +48,11 @@ DOCUMENTATION :: END
            </div>
            <div class="dashboard-recent-media-metacontainer">
                % if item['type'] == 'episode':
-               <h3>${item['grandparent_title']}</h3>
-               <h3>${item['title']}</h3>
+               <h3 title="${item['grandparent_title']}">${item['grandparent_title']}</h3>
+               <h3 title="${item['title']}">${item['title']}</h3>
                <h3 class="text-muted">S${item['parent_index']} · E${item['index']}</h3>
                % elif item['type'] == 'movie':
-               <h3>${item['title']}</h3>
+               <h3 title="${item['title']}">${item['title']}</h3>
                <h3 class="text-muted">${item['year']}</h3>
                % endif
                <div class="text-muted" id="time-${item['time']}">

@@ -73,8 +73,8 @@ DOCUMENTATION :: END
            </div>
        </div>
        <div class="dashboard-recent-media-metacontainer">
-           <h3>${item['grandparent_title']}</h3>
-           <h3>${item['title']}</h3>
+           <h3 title="${item['grandparent_title']}">${item['grandparent_title']}</h3>
+           <h3 title="${item['title']}">${item['title']}</h3>
            <h3 class="text-muted">${item['parent_title']}</h3>
        </div>
    </a>
|
25
lib/websocket/__init__.py
Normal file
25
lib/websocket/__init__.py
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
from ._core import *
|
||||||
|
from ._app import WebSocketApp
|
||||||
|
|
||||||
|
__version__ = "0.32.0"
|
382
lib/websocket/_abnf.py
Normal file
382
lib/websocket/_abnf.py
Normal file
|
@ -0,0 +1,382 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
import six
|
||||||
|
import array
|
||||||
|
import struct
|
||||||
|
import os
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._utils import validate_utf8
|
||||||
|
|
||||||
|
# closing frame status codes.
|
||||||
|
STATUS_NORMAL = 1000
|
||||||
|
STATUS_GOING_AWAY = 1001
|
||||||
|
STATUS_PROTOCOL_ERROR = 1002
|
||||||
|
STATUS_UNSUPPORTED_DATA_TYPE = 1003
|
||||||
|
STATUS_STATUS_NOT_AVAILABLE = 1005
|
||||||
|
STATUS_ABNORMAL_CLOSED = 1006
|
||||||
|
STATUS_INVALID_PAYLOAD = 1007
|
||||||
|
STATUS_POLICY_VIOLATION = 1008
|
||||||
|
STATUS_MESSAGE_TOO_BIG = 1009
|
||||||
|
STATUS_INVALID_EXTENSION = 1010
|
||||||
|
STATUS_UNEXPECTED_CONDITION = 1011
|
||||||
|
STATUS_TLS_HANDSHAKE_ERROR = 1015
|
||||||
|
|
||||||
|
VALID_CLOSE_STATUS = (
|
||||||
|
STATUS_NORMAL,
|
||||||
|
STATUS_GOING_AWAY,
|
||||||
|
STATUS_PROTOCOL_ERROR,
|
||||||
|
STATUS_UNSUPPORTED_DATA_TYPE,
|
||||||
|
STATUS_INVALID_PAYLOAD,
|
||||||
|
STATUS_POLICY_VIOLATION,
|
||||||
|
STATUS_MESSAGE_TOO_BIG,
|
||||||
|
STATUS_INVALID_EXTENSION,
|
||||||
|
STATUS_UNEXPECTED_CONDITION,
|
||||||
|
)
|
||||||
|
|
||||||
|
class ABNF(object):
|
||||||
|
"""
|
||||||
|
ABNF frame class.
|
||||||
|
see http://tools.ietf.org/html/rfc5234
|
||||||
|
and http://tools.ietf.org/html/rfc6455#section-5.2
|
||||||
|
"""
|
||||||
|
|
||||||
|
# operation code values.
|
||||||
|
OPCODE_CONT = 0x0
|
||||||
|
OPCODE_TEXT = 0x1
|
||||||
|
OPCODE_BINARY = 0x2
|
||||||
|
OPCODE_CLOSE = 0x8
|
||||||
|
OPCODE_PING = 0x9
|
||||||
|
OPCODE_PONG = 0xa
|
||||||
|
|
||||||
|
# available operation code value tuple
|
||||||
|
OPCODES = (OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE,
|
||||||
|
OPCODE_PING, OPCODE_PONG)
|
||||||
|
|
||||||
|
# opcode human readable string
|
||||||
|
OPCODE_MAP = {
|
||||||
|
OPCODE_CONT: "cont",
|
||||||
|
OPCODE_TEXT: "text",
|
||||||
|
OPCODE_BINARY: "binary",
|
||||||
|
OPCODE_CLOSE: "close",
|
||||||
|
OPCODE_PING: "ping",
|
||||||
|
OPCODE_PONG: "pong"
|
||||||
|
}
|
||||||
|
|
||||||
|
# data length threashold.
|
||||||
|
LENGTH_7 = 0x7e
|
||||||
|
LENGTH_16 = 1 << 16
|
||||||
|
LENGTH_63 = 1 << 63
|
||||||
|
|
||||||
|
def __init__(self, fin=0, rsv1=0, rsv2=0, rsv3=0,
|
||||||
|
opcode=OPCODE_TEXT, mask=1, data=""):
|
||||||
|
"""
|
||||||
|
Constructor for ABNF.
|
||||||
|
please check RFC for arguments.
|
||||||
|
"""
|
||||||
|
self.fin = fin
|
||||||
|
self.rsv1 = rsv1
|
||||||
|
self.rsv2 = rsv2
|
||||||
|
self.rsv3 = rsv3
|
||||||
|
self.opcode = opcode
|
||||||
|
self.mask = mask
|
||||||
|
if data == None:
|
||||||
|
data = ""
|
||||||
|
self.data = data
|
||||||
|
self.get_mask_key = os.urandom
|
||||||
|
|
||||||
|
def validate(self, skip_utf8_validation=False):
|
||||||
|
"""
|
||||||
|
validate the ABNF frame.
|
||||||
|
skip_utf8_validation: skip utf8 validation.
|
||||||
|
"""
|
||||||
|
if self.rsv1 or self.rsv2 or self.rsv3:
|
||||||
|
raise WebSocketProtocolException("rsv is not implemented, yet")
|
||||||
|
|
||||||
|
if self.opcode not in ABNF.OPCODES:
|
||||||
|
raise WebSocketProtocolException("Invalid opcode %r", self.opcode)
|
||||||
|
|
||||||
|
if self.opcode == ABNF.OPCODE_PING and not self.fin:
|
||||||
|
raise WebSocketProtocolException("Invalid ping frame.")
|
||||||
|
|
||||||
|
if self.opcode == ABNF.OPCODE_CLOSE:
|
||||||
|
l = len(self.data)
|
||||||
|
if not l:
|
||||||
|
return
|
||||||
|
if l == 1 or l >= 126:
|
||||||
|
raise WebSocketProtocolException("Invalid close frame.")
|
||||||
|
if l > 2 and not skip_utf8_validation and not validate_utf8(self.data[2:]):
|
||||||
|
raise WebSocketProtocolException("Invalid close frame.")
|
||||||
|
|
||||||
|
code = 256*six.byte2int(self.data[0:1]) + six.byte2int(self.data[1:2])
|
||||||
|
if not self._is_valid_close_status(code):
|
||||||
|
raise WebSocketProtocolException("Invalid close opcode.")
|
||||||
|
|
||||||
|
def _is_valid_close_status(self, code):
|
||||||
|
return code in VALID_CLOSE_STATUS or (3000 <= code <5000)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "fin=" + str(self.fin) \
|
||||||
|
+ " opcode=" + str(self.opcode) \
|
||||||
|
+ " data=" + str(self.data)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create_frame(data, opcode, fin=1):
|
||||||
|
"""
|
||||||
|
create frame to send text, binary and other data.
|
||||||
|
|
||||||
|
data: data to send. This is string value(byte array).
|
||||||
|
if opcode is OPCODE_TEXT and this value is uniocde,
|
||||||
|
data value is conveted into unicode string, automatically.
|
||||||
|
|
||||||
|
opcode: operation code. please see OPCODE_XXX.
|
||||||
|
|
||||||
|
fin: fin flag. if set to 0, create continue fragmentation.
|
||||||
|
"""
|
||||||
|
if opcode == ABNF.OPCODE_TEXT and isinstance(data, six.text_type):
|
||||||
|
data = data.encode("utf-8")
|
||||||
|
# mask must be set if send data from client
|
||||||
|
return ABNF(fin, 0, 0, 0, opcode, 1, data)
|
||||||
|
|
||||||
|
def format(self):
|
||||||
|
"""
|
||||||
|
format this object to string(byte array) to send data to server.
|
||||||
|
"""
|
||||||
|
if any(x not in (0, 1) for x in [self.fin, self.rsv1, self.rsv2, self.rsv3]):
|
||||||
|
raise ValueError("not 0 or 1")
|
||||||
|
if self.opcode not in ABNF.OPCODES:
|
||||||
|
raise ValueError("Invalid OPCODE")
|
||||||
|
length = len(self.data)
|
||||||
|
if length >= ABNF.LENGTH_63:
|
||||||
|
raise ValueError("data is too long")
|
||||||
|
|
||||||
|
frame_header = chr(self.fin << 7
|
||||||
|
| self.rsv1 << 6 | self.rsv2 << 5 | self.rsv3 << 4
|
||||||
|
| self.opcode)
|
||||||
|
if length < ABNF.LENGTH_7:
|
||||||
|
frame_header += chr(self.mask << 7 | length)
|
||||||
|
frame_header = six.b(frame_header)
|
||||||
|
elif length < ABNF.LENGTH_16:
|
||||||
|
frame_header += chr(self.mask << 7 | 0x7e)
|
||||||
|
frame_header = six.b(frame_header)
|
||||||
|
frame_header += struct.pack("!H", length)
|
||||||
|
else:
|
||||||
|
frame_header += chr(self.mask << 7 | 0x7f)
|
||||||
|
frame_header = six.b(frame_header)
|
||||||
|
frame_header += struct.pack("!Q", length)
|
||||||
|
|
||||||
|
if not self.mask:
|
||||||
|
return frame_header + self.data
|
||||||
|
else:
|
||||||
|
mask_key = self.get_mask_key(4)
|
||||||
|
return frame_header + self._get_masked(mask_key)
|
||||||
|
|
||||||
|
def _get_masked(self, mask_key):
|
||||||
|
s = ABNF.mask(mask_key, self.data)
|
||||||
|
|
||||||
|
if isinstance(mask_key, six.text_type):
|
||||||
|
mask_key = mask_key.encode('utf-8')
|
||||||
|
|
||||||
|
return mask_key + s
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def mask(mask_key, data):
|
||||||
|
"""
|
||||||
|
mask or unmask data. Just do xor for each byte
|
||||||
|
|
||||||
|
mask_key: 4 byte string(byte).
|
||||||
|
|
||||||
|
data: data to mask/unmask.
|
||||||
|
"""
|
||||||
|
if data == None:
|
||||||
|
data = ""
|
||||||
|
if isinstance(mask_key, six.text_type):
|
||||||
|
mask_key = six.b(mask_key)
|
||||||
|
|
||||||
|
if isinstance(data, six.text_type):
|
||||||
|
data = six.b(data)
|
||||||
|
|
||||||
|
_m = array.array("B", mask_key)
|
||||||
|
_d = array.array("B", data)
|
||||||
|
for i in range(len(_d)):
|
||||||
|
_d[i] ^= _m[i % 4]
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
return _d.tobytes()
|
||||||
|
else:
|
||||||
|
return _d.tostring()
|
||||||
|
|
||||||
|
|
||||||
|
class frame_buffer(object):
|
||||||
|
_HEADER_MASK_INDEX = 5
|
||||||
|
_HEADER_LENGHT_INDEX = 6
|
||||||
|
|
||||||
|
def __init__(self, recv_fn, skip_utf8_validation):
|
||||||
|
self.recv = recv_fn
|
||||||
|
self.skip_utf8_validation = skip_utf8_validation
|
||||||
|
# Buffers over the packets from the layer beneath until desired amount
|
||||||
|
# bytes of bytes are received.
|
||||||
|
self.recv_buffer = []
|
||||||
|
self.clear()
|
||||||
|
|
||||||
|
def clear(self):
|
||||||
|
self.header = None
|
||||||
|
self.length = None
|
||||||
|
self.mask = None
|
||||||
|
|
||||||
|
def has_received_header(self):
|
||||||
|
return self.header is None
|
||||||
|
|
||||||
|
def recv_header(self):
|
||||||
|
header = self.recv_strict(2)
|
||||||
|
b1 = header[0]
|
||||||
|
|
||||||
|
if six.PY2:
|
||||||
|
b1 = ord(b1)
|
||||||
|
|
||||||
|
fin = b1 >> 7 & 1
|
||||||
|
rsv1 = b1 >> 6 & 1
|
||||||
|
rsv2 = b1 >> 5 & 1
|
||||||
|
rsv3 = b1 >> 4 & 1
|
||||||
|
opcode = b1 & 0xf
|
||||||
|
b2 = header[1]
|
||||||
|
|
||||||
|
if six.PY2:
|
||||||
|
b2 = ord(b2)
|
||||||
|
|
||||||
|
has_mask = b2 >> 7 & 1
|
||||||
|
length_bits = b2 & 0x7f
|
||||||
|
|
||||||
|
self.header = (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits)
|
||||||
|
|
||||||
|
def has_mask(self):
|
||||||
|
if not self.header:
|
||||||
|
return False
|
||||||
|
return self.header[frame_buffer._HEADER_MASK_INDEX]
|
||||||
|
|
||||||
|
|
||||||
|
def has_received_length(self):
|
||||||
|
return self.length is None
|
||||||
|
|
||||||
|
def recv_length(self):
|
||||||
|
bits = self.header[frame_buffer._HEADER_LENGHT_INDEX]
|
||||||
|
length_bits = bits & 0x7f
|
||||||
|
if length_bits == 0x7e:
|
||||||
|
v = self.recv_strict(2)
|
||||||
|
self.length = struct.unpack("!H", v)[0]
|
||||||
|
elif length_bits == 0x7f:
|
||||||
|
v = self.recv_strict(8)
|
||||||
|
self.length = struct.unpack("!Q", v)[0]
|
||||||
|
else:
|
||||||
|
self.length = length_bits
|
||||||
|
|
||||||
|
def has_received_mask(self):
|
||||||
|
return self.mask is None
|
||||||
|
|
||||||
|
def recv_mask(self):
|
||||||
|
self.mask = self.recv_strict(4) if self.has_mask() else ""
|
||||||
|
|
||||||
|
def recv_frame(self):
|
||||||
|
# Header
|
||||||
|
if self.has_received_header():
|
||||||
|
self.recv_header()
|
||||||
|
(fin, rsv1, rsv2, rsv3, opcode, has_mask, _) = self.header
|
||||||
|
|
||||||
|
# Frame length
|
||||||
|
if self.has_received_length():
|
||||||
|
self.recv_length()
|
||||||
|
length = self.length
|
||||||
|
|
||||||
|
# Mask
|
||||||
|
if self.has_received_mask():
|
||||||
|
self.recv_mask()
|
||||||
|
mask = self.mask
|
||||||
|
|
||||||
|
# Payload
|
||||||
|
payload = self.recv_strict(length)
|
||||||
|
if has_mask:
|
||||||
|
payload = ABNF.mask(mask, payload)
|
||||||
|
|
||||||
|
# Reset for next frame
|
||||||
|
self.clear()
|
||||||
|
|
||||||
|
frame = ABNF(fin, rsv1, rsv2, rsv3, opcode, has_mask, payload)
|
||||||
|
frame.validate(self.skip_utf8_validation)
|
||||||
|
|
||||||
|
return frame
|
||||||
|
|
||||||
|
def recv_strict(self, bufsize):
|
||||||
|
shortage = bufsize - sum(len(x) for x in self.recv_buffer)
|
||||||
|
while shortage > 0:
|
||||||
|
# Limit buffer size that we pass to socket.recv() to avoid
|
||||||
|
# fragmenting the heap -- the number of bytes recv() actually
|
||||||
|
# reads is limited by socket buffer and is relatively small,
|
||||||
|
# yet passing large numbers repeatedly causes lots of large
|
||||||
|
# buffers allocated and then shrunk, which results in fragmentation.
|
||||||
|
bytes = self.recv(min(16384, shortage))
|
||||||
|
self.recv_buffer.append(bytes)
|
||||||
|
shortage -= len(bytes)
|
||||||
|
|
||||||
|
unified = six.b("").join(self.recv_buffer)
|
||||||
|
|
||||||
|
if shortage == 0:
|
||||||
|
self.recv_buffer = []
|
||||||
|
return unified
|
||||||
|
else:
|
||||||
|
self.recv_buffer = [unified[bufsize:]]
|
||||||
|
return unified[:bufsize]
|
||||||
|
|
||||||
|
|
||||||
|
class continuous_frame(object):
|
||||||
|
def __init__(self, fire_cont_frame, skip_utf8_validation):
|
||||||
|
self.fire_cont_frame = fire_cont_frame
|
||||||
|
self.skip_utf8_validation = skip_utf8_validation
|
||||||
|
self.cont_data = None
|
||||||
|
self.recving_frames = None
|
||||||
|
|
||||||
|
def validate(self, frame):
|
||||||
|
if not self.recving_frames and frame.opcode == ABNF.OPCODE_CONT:
|
||||||
|
raise WebSocketProtocolException("Illegal frame")
|
||||||
|
if self.recving_frames and frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
|
||||||
|
raise WebSocketProtocolException("Illegal frame")
|
||||||
|
|
||||||
|
def add(self, frame):
|
||||||
|
if self.cont_data:
|
||||||
|
self.cont_data[1] += frame.data
|
||||||
|
else:
|
||||||
|
if frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
|
||||||
|
self.recving_frames = frame.opcode
|
||||||
|
self.cont_data = [frame.opcode, frame.data]
|
||||||
|
|
||||||
|
if frame.fin:
|
||||||
|
self.recving_frames = None
|
||||||
|
|
||||||
|
def is_fire(self, frame):
|
||||||
|
return frame.fin or self.fire_cont_frame
|
||||||
|
|
||||||
|
def extract(self, frame):
|
||||||
|
data = self.cont_data
|
||||||
|
self.cont_data = None
|
||||||
|
frame.data = data[1]
|
||||||
|
if not self.fire_cont_frame and data[0] == ABNF.OPCODE_TEXT and not self.skip_utf8_validation and not validate_utf8(frame.data):
|
||||||
|
raise WebSocketPayloadException("cannot decode: " + repr(frame.data))
|
||||||
|
|
||||||
|
return [data[0], frame]
|
236
lib/websocket/_app.py
Normal file
236
lib/websocket/_app.py
Normal file
|
@ -0,0 +1,236 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
"""
|
||||||
|
WebSocketApp provides higher level APIs.
|
||||||
|
"""
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
import sys
|
||||||
|
import select
|
||||||
|
import six
|
||||||
|
|
||||||
|
from ._core import WebSocket, getdefaulttimeout
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._logging import *
|
||||||
|
from websocket._abnf import ABNF
|
||||||
|
|
||||||
|
__all__ = ["WebSocketApp"]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketApp(object):
|
||||||
|
"""
|
||||||
|
Higher level of APIs are provided.
|
||||||
|
The interface is like JavaScript WebSocket object.
|
||||||
|
"""
|
||||||
|
def __init__(self, url, header=[],
|
||||||
|
on_open=None, on_message=None, on_error=None,
|
||||||
|
on_close=None, on_ping=None, on_pong=None,
|
||||||
|
on_cont_message=None,
|
||||||
|
keep_running=True, get_mask_key=None, cookie=None,
|
||||||
|
subprotocols=None):
|
||||||
|
"""
|
||||||
|
url: websocket url.
|
||||||
|
header: custom header for websocket handshake.
|
||||||
|
on_open: callable object which is called at opening websocket.
|
||||||
|
this function has one argument. The arugment is this class object.
|
||||||
|
on_message: callbale object which is called when recieved data.
|
||||||
|
on_message has 2 arguments.
|
||||||
|
The 1st arugment is this class object.
|
||||||
|
The passing 2nd arugment is utf-8 string which we get from the server.
|
||||||
|
on_error: callable object which is called when we get error.
|
||||||
|
on_error has 2 arguments.
|
||||||
|
The 1st arugment is this class object.
|
||||||
|
The passing 2nd arugment is exception object.
|
||||||
|
on_close: callable object which is called when closed the connection.
|
||||||
|
this function has one argument. The arugment is this class object.
|
||||||
|
on_cont_message: callback object which is called when recieve continued
|
||||||
|
frame data.
|
||||||
|
on_message has 3 arguments.
|
||||||
|
The 1st arugment is this class object.
|
||||||
|
The passing 2nd arugment is utf-8 string which we get from the server.
|
||||||
|
The 3rd arugment is continue flag. if 0, the data continue
|
||||||
|
to next frame data
|
||||||
|
keep_running: a boolean flag indicating whether the app's main loop
|
||||||
|
should keep running, defaults to True
|
||||||
|
get_mask_key: a callable to produce new mask keys,
|
||||||
|
see the WebSocket.set_mask_key's docstring for more information
|
||||||
|
subprotocols: array of available sub protocols. default is None.
|
||||||
|
"""
|
||||||
|
self.url = url
|
||||||
|
self.header = header
|
||||||
|
self.cookie = cookie
|
||||||
|
self.on_open = on_open
|
||||||
|
self.on_message = on_message
|
||||||
|
self.on_error = on_error
|
||||||
|
self.on_close = on_close
|
||||||
|
self.on_ping = on_ping
|
||||||
|
self.on_pong = on_pong
|
||||||
|
self.on_cont_message = on_cont_message
|
||||||
|
self.keep_running = keep_running
|
||||||
|
self.get_mask_key = get_mask_key
|
||||||
|
self.sock = None
|
||||||
|
self.last_ping_tm = 0
|
||||||
|
self.subprotocols = subprotocols
|
||||||
|
|
||||||
|
def send(self, data, opcode=ABNF.OPCODE_TEXT):
|
||||||
|
"""
|
||||||
|
send message.
|
||||||
|
data: message to send. If you set opcode to OPCODE_TEXT,
|
||||||
|
data must be utf-8 string or unicode.
|
||||||
|
opcode: operation code of data. default is OPCODE_TEXT.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not self.sock or self.sock.send(data, opcode) == 0:
|
||||||
|
raise WebSocketConnectionClosedException("Connection is already closed.")
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""
|
||||||
|
close websocket connection.
|
||||||
|
"""
|
||||||
|
self.keep_running = False
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
|
||||||
|
def _send_ping(self, interval, event):
|
||||||
|
while not event.wait(interval):
|
||||||
|
self.last_ping_tm = time.time()
|
||||||
|
if self.sock:
|
||||||
|
self.sock.ping()
|
||||||
|
|
||||||
|
def run_forever(self, sockopt=None, sslopt=None,
|
||||||
|
ping_interval=0, ping_timeout=None,
|
||||||
|
http_proxy_host=None, http_proxy_port=None,
|
||||||
|
http_no_proxy=None, http_proxy_auth=None,
|
||||||
|
skip_utf8_validation=False,
|
||||||
|
host=None, origin=None):
|
||||||
|
"""
|
||||||
|
run event loop for WebSocket framework.
|
||||||
|
This loop is infinite loop and is alive during websocket is available.
|
||||||
|
sockopt: values for socket.setsockopt.
|
||||||
|
sockopt must be tuple
|
||||||
|
and each element is argument of sock.setscokopt.
|
||||||
|
sslopt: ssl socket optional dict.
|
||||||
|
ping_interval: automatically send "ping" command
|
||||||
|
every specified period(second)
|
||||||
|
if set to 0, not send automatically.
|
||||||
|
ping_timeout: timeout(second) if the pong message is not recieved.
|
||||||
|
http_proxy_host: http proxy host name.
|
||||||
|
http_proxy_port: http proxy port. If not set, set to 80.
|
||||||
|
http_no_proxy: host names, which doesn't use proxy.
|
||||||
|
skip_utf8_validation: skip utf8 validation.
|
||||||
|
host: update host header.
|
||||||
|
origin: update origin header.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not ping_timeout or ping_timeout <= 0:
|
||||||
|
ping_timeout = None
|
||||||
|
if sockopt is None:
|
||||||
|
sockopt = []
|
||||||
|
if sslopt is None:
|
||||||
|
sslopt = {}
|
||||||
|
if self.sock:
|
||||||
|
raise WebSocketException("socket is already opened")
|
||||||
|
thread = None
|
||||||
|
close_frame = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.sock = WebSocket(self.get_mask_key,
|
||||||
|
sockopt=sockopt, sslopt=sslopt,
|
||||||
|
fire_cont_frame=self.on_cont_message and True or False,
|
||||||
|
skip_utf8_validation=skip_utf8_validation)
|
||||||
|
self.sock.settimeout(getdefaulttimeout())
|
||||||
|
self.sock.connect(self.url, header=self.header, cookie=self.cookie,
|
||||||
|
http_proxy_host=http_proxy_host,
|
||||||
|
http_proxy_port=http_proxy_port,
|
||||||
|
http_no_proxy=http_no_proxy, http_proxy_auth=http_proxy_auth,
|
||||||
|
subprotocols=self.subprotocols,
|
||||||
|
host=host, origin=origin)
|
||||||
|
self._callback(self.on_open)
|
||||||
|
|
||||||
|
if ping_interval:
|
||||||
|
event = threading.Event()
|
||||||
|
thread = threading.Thread(target=self._send_ping, args=(ping_interval, event))
|
||||||
|
thread.setDaemon(True)
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
while self.sock.connected:
|
||||||
|
r, w, e = select.select((self.sock.sock, ), (), (), ping_timeout)
|
||||||
|
if not self.keep_running:
|
||||||
|
break
|
||||||
|
if ping_timeout and self.last_ping_tm and time.time() - self.last_ping_tm > ping_timeout:
|
||||||
|
self.last_ping_tm = 0
|
||||||
|
raise WebSocketTimeoutException("ping timed out")
|
||||||
|
|
||||||
|
if r:
|
||||||
|
op_code, frame = self.sock.recv_data_frame(True)
|
||||||
|
if op_code == ABNF.OPCODE_CLOSE:
|
||||||
|
close_frame = frame
|
||||||
|
break
|
||||||
|
elif op_code == ABNF.OPCODE_PING:
|
||||||
|
self._callback(self.on_ping, frame.data)
|
||||||
|
elif op_code == ABNF.OPCODE_PONG:
|
||||||
|
self._callback(self.on_pong, frame.data)
|
||||||
|
elif op_code == ABNF.OPCODE_CONT and self.on_cont_message:
|
||||||
|
self._callback(self.on_cont_message, frame.data, frame.fin)
|
||||||
|
else:
|
||||||
|
data = frame.data
|
||||||
|
if six.PY3 and frame.opcode == ABNF.OPCODE_TEXT:
|
||||||
|
data = data.decode("utf-8")
|
||||||
|
self._callback(self.on_message, data)
|
||||||
|
except Exception as e:
|
||||||
|
self._callback(self.on_error, e)
|
||||||
|
finally:
|
||||||
|
if thread:
|
||||||
|
event.set()
|
||||||
|
thread.join()
|
||||||
|
self.keep_running = False
|
||||||
|
self.sock.close()
|
||||||
|
self._callback(self.on_close,
|
||||||
|
*self._get_close_args(close_frame.data if close_frame else None))
|
||||||
|
self.sock = None
|
||||||
|
|
||||||
|
def _get_close_args(self, data):
|
||||||
|
""" this functions extracts the code, reason from the close body
|
||||||
|
if they exists, and if the self.on_close except three arguments """
|
||||||
|
import inspect
|
||||||
|
# if the on_close callback is "old", just return empty list
|
||||||
|
if not self.on_close or len(inspect.getargspec(self.on_close).args) != 3:
|
||||||
|
return []
|
||||||
|
|
||||||
|
if data and len(data) >= 2:
|
||||||
|
code = 256*six.byte2int(data[0:1]) + six.byte2int(data[1:2])
|
||||||
|
reason = data[2:].decode('utf-8')
|
||||||
|
return [code, reason]
|
||||||
|
|
||||||
|
return [None, None]
|
||||||
|
|
||||||
|
def _callback(self, callback, *args):
|
||||||
|
if callback:
|
||||||
|
try:
|
||||||
|
callback(self, *args)
|
||||||
|
except Exception as e:
|
||||||
|
error(e)
|
||||||
|
if isEnabledForDebug():
|
||||||
|
_, _, tb = sys.exc_info()
|
||||||
|
traceback.print_tb(tb)
|
482
lib/websocket/_core.py
Normal file
482
lib/websocket/_core.py
Normal file
|
@ -0,0 +1,482 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
|
||||||
|
import six
|
||||||
|
import socket
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
from base64 import encodebytes as base64encode
|
||||||
|
else:
|
||||||
|
from base64 import encodestring as base64encode
|
||||||
|
|
||||||
|
import struct
|
||||||
|
import threading
|
||||||
|
|
||||||
|
# websocket modules
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._abnf import *
|
||||||
|
from ._socket import *
|
||||||
|
from ._utils import *
|
||||||
|
from ._url import *
|
||||||
|
from ._logging import *
|
||||||
|
from ._http import *
|
||||||
|
from ._handshake import *
|
||||||
|
from ._ssl_compat import *
|
||||||
|
|
||||||
|
"""
|
||||||
|
websocket python client.
|
||||||
|
=========================
|
||||||
|
|
||||||
|
This version support only hybi-13.
|
||||||
|
Please see http://tools.ietf.org/html/rfc6455 for protocol.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def create_connection(url, timeout=None, **options):
|
||||||
|
"""
|
||||||
|
connect to url and return websocket object.
|
||||||
|
|
||||||
|
Connect to url and return the WebSocket object.
|
||||||
|
Passing optional timeout parameter will set the timeout on the socket.
|
||||||
|
If no timeout is supplied,
|
||||||
|
the global default timeout setting returned by getdefauttimeout() is used.
|
||||||
|
You can customize using 'options'.
|
||||||
|
If you set "header" list object, you can set your own custom header.
|
||||||
|
|
||||||
|
>>> conn = create_connection("ws://echo.websocket.org/",
|
||||||
|
... header=["User-Agent: MyProgram",
|
||||||
|
... "x-custom: header"])
|
||||||
|
|
||||||
|
|
||||||
|
timeout: socket timeout time. This value is integer.
|
||||||
|
if you set None for this value,
|
||||||
|
it means "use default_timeout value"
|
||||||
|
|
||||||
|
|
||||||
|
options: "header" -> custom http header list.
|
||||||
|
"cookie" -> cookie value.
|
||||||
|
"origin" -> custom origin url.
|
||||||
|
"host" -> custom host header string.
|
||||||
|
"http_proxy_host" - http proxy host name.
|
||||||
|
"http_proxy_port" - http proxy port. If not set, set to 80.
|
||||||
|
"http_no_proxy" - host names, which doesn't use proxy.
|
||||||
|
"http_proxy_auth" - http proxy auth infomation.
|
||||||
|
tuple of username and password.
|
||||||
|
default is None
|
||||||
|
"enable_multithread" -> enable lock for multithread.
|
||||||
|
"sockopt" -> socket options
|
||||||
|
"sslopt" -> ssl option
|
||||||
|
"subprotocols" - array of available sub protocols.
|
||||||
|
default is None.
|
||||||
|
"skip_utf8_validation" - skip utf8 validation.
|
||||||
|
"""
|
||||||
|
sockopt = options.get("sockopt", [])
|
||||||
|
sslopt = options.get("sslopt", {})
|
||||||
|
fire_cont_frame = options.get("fire_cont_frame", False)
|
||||||
|
enable_multithread = options.get("enable_multithread", False)
|
||||||
|
skip_utf8_validation = options.get("skip_utf8_validation", False)
|
||||||
|
websock = WebSocket(sockopt=sockopt, sslopt=sslopt,
|
||||||
|
fire_cont_frame=fire_cont_frame,
|
||||||
|
enable_multithread=enable_multithread,
|
||||||
|
skip_utf8_validation=skip_utf8_validation)
|
||||||
|
websock.settimeout(timeout if timeout is not None else getdefaulttimeout())
|
||||||
|
websock.connect(url, **options)
|
||||||
|
return websock
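# Illustrative sketch, not part of the upstream library: a minimal helper showing
# how create_connection() above is typically used. The URL and header below are
# placeholders, not real endpoints.
def _example_create_connection(url="ws://example.com/socket"):
    # Open the connection with a 10 second socket timeout and one custom header,
    # send a text message, read the reply and close cleanly.
    conn = create_connection(url, timeout=10, header=["X-Custom: value"])
    try:
        conn.send("hello")
        return conn.recv()
    finally:
        conn.close()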
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocket(object):
|
||||||
|
"""
|
||||||
|
Low level WebSocket interface.
|
||||||
|
This class is based on
|
||||||
|
the WebSocket protocol version hybi-13 (RFC 6455):
|
||||||
|
http://tools.ietf.org/html/rfc6455
|
||||||
|
|
||||||
|
We can connect to the websocket server and send/receive data.
|
||||||
|
The following example is an echo client.
|
||||||
|
|
||||||
|
>>> import websocket
|
||||||
|
>>> ws = websocket.WebSocket()
|
||||||
|
>>> ws.connect("ws://echo.websocket.org")
|
||||||
|
>>> ws.send("Hello, Server")
|
||||||
|
>>> ws.recv()
|
||||||
|
'Hello, Server'
|
||||||
|
>>> ws.close()
|
||||||
|
|
||||||
|
get_mask_key: a callable to produce new mask keys, see the set_mask_key
|
||||||
|
function's docstring for more details
|
||||||
|
sockopt: values for socket.setsockopt.
|
||||||
|
sockopt must be a tuple and each element is an argument of sock.setsockopt.
|
||||||
|
sslopt: dict object for ssl socket option.
|
||||||
|
fire_cont_frame: fire recv event for each cont frame. default is False
|
||||||
|
enable_multithread: if set to True, lock send method.
|
||||||
|
skip_utf8_validation: skip utf8 validation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, get_mask_key=None, sockopt=None, sslopt=None,
|
||||||
|
fire_cont_frame=False, enable_multithread=False,
|
||||||
|
skip_utf8_validation=False):
|
||||||
|
"""
|
||||||
|
Initialize the WebSocket object.
|
||||||
|
"""
|
||||||
|
self.sock_opt = sock_opt(sockopt, sslopt)
|
||||||
|
self.handshake_response = None
|
||||||
|
self.sock = None
|
||||||
|
|
||||||
|
self.connected = False
|
||||||
|
self.get_mask_key = get_mask_key
|
||||||
|
# These buffers hold the build-up of a single frame.
|
||||||
|
self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation)
|
||||||
|
self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation)
|
||||||
|
|
||||||
|
if enable_multithread:
|
||||||
|
self.lock = threading.Lock()
|
||||||
|
else:
|
||||||
|
self.lock = NoLock()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
"""
|
||||||
|
Allow iteration over websocket, implying sequential `recv` executions.
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
yield self.recv()
|
||||||
|
|
||||||
|
def __next__(self):
|
||||||
|
return self.recv()
|
||||||
|
|
||||||
|
def next(self):
|
||||||
|
return self.__next__()
|
||||||
|
|
||||||
|
def fileno(self):
|
||||||
|
return self.sock.fileno()
|
||||||
|
|
||||||
|
def set_mask_key(self, func):
|
||||||
|
"""
|
||||||
|
Set the function used to create the mask key. You can customize the mask key generator.
|
||||||
|
Mainly, this is for testing purposes.
|
||||||
|
|
||||||
|
func: callable object. The function must take one integer argument.
|
||||||
|
The argument is the length of the mask key.
|
||||||
|
The function must return a string (byte array)
|
||||||
|
whose length equals that argument.
|
||||||
|
"""
|
||||||
|
self.get_mask_key = func
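# Illustrative note, not part of the upstream library: the callable passed to
# set_mask_key() receives the required key length and must return a byte string
# of exactly that length. A deterministic stub of the kind used in tests:
#
#     def fixed_mask_key(length):
#         return "a" * length
#
#     ws.set_mask_key(fixed_mask_key)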
|
||||||
|
|
||||||
|
def gettimeout(self):
|
||||||
|
"""
|
||||||
|
Get the websocket timeout (seconds).
|
||||||
|
"""
|
||||||
|
return self.sock_opt.timeout
|
||||||
|
|
||||||
|
def settimeout(self, timeout):
|
||||||
|
"""
|
||||||
|
Set the timeout to the websocket.
|
||||||
|
|
||||||
|
timeout: timeout time (seconds).
|
||||||
|
"""
|
||||||
|
self.sock_opt.timeout = timeout
|
||||||
|
if self.sock:
|
||||||
|
self.sock.settimeout(timeout)
|
||||||
|
|
||||||
|
timeout = property(gettimeout, settimeout)
|
||||||
|
|
||||||
|
def getsubprotocol(self):
|
||||||
|
"""
|
||||||
|
get subprotocol
|
||||||
|
"""
|
||||||
|
if self.handshake_response:
|
||||||
|
return self.handshake_response.subprotocol
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
subprotocol = property(getsubprotocol)
|
||||||
|
|
||||||
|
def getstatus(self):
|
||||||
|
"""
|
||||||
|
get handshake status
|
||||||
|
"""
|
||||||
|
if self.handshake_response:
|
||||||
|
return self.handshake_response.status
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
status = property(getstatus)
|
||||||
|
|
||||||
|
def getheaders(self):
|
||||||
|
"""
|
||||||
|
get handshake response header
|
||||||
|
"""
|
||||||
|
if self.handshake_response:
|
||||||
|
return self.handshake_response.headers
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
headers = property(getheaders)
|
||||||
|
|
||||||
|
def connect(self, url, **options):
|
||||||
|
"""
|
||||||
|
Connect to url. url is websocket url scheme.
|
||||||
|
e.g. ws://host:port/resource
|
||||||
|
You can customize using 'options'.
|
||||||
|
If you set "header" list object, you can set your own custom header.
|
||||||
|
|
||||||
|
>>> ws = WebSocket()
|
||||||
|
>>> ws.connect("ws://echo.websocket.org/",
|
||||||
|
... header=["User-Agent: MyProgram",
|
||||||
|
... "x-custom: header"])
|
||||||
|
|
||||||
|
timeout: socket timeout time, in seconds. This value is an integer.
|
||||||
|
if you set None for this value,
|
||||||
|
it means "use default_timeout value"
|
||||||
|
|
||||||
|
options: "header" -> custom http header list.
|
||||||
|
"cookie" -> cookie value.
|
||||||
|
"origin" -> custom origin url.
|
||||||
|
"host" -> custom host header string.
|
||||||
|
"http_proxy_host" - http proxy host name.
|
||||||
|
"http_proxy_port" - http proxy port. If not set, set to 80.
|
||||||
|
"http_no_proxy" - host names, which doesn't use proxy.
|
||||||
|
"http_proxy_auth" - http proxy auth infomation.
|
||||||
|
tuple of username and password.
|
||||||
|
default is None
|
||||||
|
"subprotocols" - array of available sub protocols.
|
||||||
|
default is None.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options))
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.handshake_response = handshake(self.sock, *addrs, **options)
|
||||||
|
self.connected = True
|
||||||
|
except:
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
self.sock = None
|
||||||
|
raise
|
||||||
|
|
||||||
|
def send(self, payload, opcode=ABNF.OPCODE_TEXT):
|
||||||
|
"""
|
||||||
|
Send the data as string.
|
||||||
|
|
||||||
|
payload: Payload must be utf-8 string or unicode,
|
||||||
|
if the opcode is OPCODE_TEXT.
|
||||||
|
Otherwise, it must be string(byte array)
|
||||||
|
|
||||||
|
opcode: operation code to send. Please see OPCODE_XXX.
|
||||||
|
"""
|
||||||
|
|
||||||
|
frame = ABNF.create_frame(payload, opcode)
|
||||||
|
return self.send_frame(frame)
|
||||||
|
|
||||||
|
def send_frame(self, frame):
|
||||||
|
"""
|
||||||
|
Send the data frame.
|
||||||
|
|
||||||
|
frame: frame data created by ABNF.create_frame
|
||||||
|
|
||||||
|
>>> ws = create_connection("ws://echo.websocket.org/")
|
||||||
|
>>> frame = ABNF.create_frame("Hello", ABNF.OPCODE_TEXT)
|
||||||
|
>>> ws.send_frame(frame)
|
||||||
|
>>> cont_frame = ABNF.create_frame("My name is ", ABNF.OPCODE_CONT, 0)
|
||||||
|
>>> ws.send_frame(cont_frame)
|
||||||
|
>>> cont_frame = ABNF.create_frame("Foo Bar", ABNF.OPCODE_CONT, 1)
|
||||||
|
>>> ws.send_frame(cont_frame)
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self.get_mask_key:
|
||||||
|
frame.get_mask_key = self.get_mask_key
|
||||||
|
data = frame.format()
|
||||||
|
length = len(data)
|
||||||
|
trace("send: " + repr(data))
|
||||||
|
|
||||||
|
with self.lock:
|
||||||
|
while data:
|
||||||
|
l = self._send(data)
|
||||||
|
data = data[l:]
|
||||||
|
|
||||||
|
return length
|
||||||
|
|
||||||
|
def send_binary(self, payload):
|
||||||
|
return self.send(payload, ABNF.OPCODE_BINARY)
|
||||||
|
|
||||||
|
def ping(self, payload=""):
|
||||||
|
"""
|
||||||
|
send ping data.
|
||||||
|
|
||||||
|
payload: data payload to send server.
|
||||||
|
"""
|
||||||
|
if isinstance(payload, six.text_type):
|
||||||
|
payload = payload.encode("utf-8")
|
||||||
|
self.send(payload, ABNF.OPCODE_PING)
|
||||||
|
|
||||||
|
def pong(self, payload):
|
||||||
|
"""
|
||||||
|
send pong data.
|
||||||
|
|
||||||
|
payload: data payload to send server.
|
||||||
|
"""
|
||||||
|
if isinstance(payload, six.text_type):
|
||||||
|
payload = payload.encode("utf-8")
|
||||||
|
self.send(payload, ABNF.OPCODE_PONG)
|
||||||
|
|
||||||
|
def recv(self):
|
||||||
|
"""
|
||||||
|
Receive string data(byte array) from the server.
|
||||||
|
|
||||||
|
return value: string(byte array) value.
|
||||||
|
"""
|
||||||
|
opcode, data = self.recv_data()
|
||||||
|
if six.PY3 and opcode == ABNF.OPCODE_TEXT:
|
||||||
|
return data.decode("utf-8")
|
||||||
|
elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY:
|
||||||
|
return data
|
||||||
|
else:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def recv_data(self, control_frame=False):
|
||||||
|
"""
|
||||||
|
Receive data with operation code.
|
||||||
|
|
||||||
|
control_frame: a boolean flag indicating whether to return control frame
|
||||||
|
data, defaults to False
|
||||||
|
|
||||||
|
return value: tuple of operation code and string(byte array) value.
|
||||||
|
"""
|
||||||
|
opcode, frame = self.recv_data_frame(control_frame)
|
||||||
|
return opcode, frame.data
|
||||||
|
|
||||||
|
def recv_data_frame(self, control_frame=False):
|
||||||
|
"""
|
||||||
|
Receive data with operation code.
|
||||||
|
|
||||||
|
control_frame: a boolean flag indicating whether to return control frame
|
||||||
|
data, defaults to False
|
||||||
|
|
||||||
|
return value: tuple of operation code and string(byte array) value.
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
frame = self.recv_frame()
|
||||||
|
if not frame:
|
||||||
|
# handle error:
|
||||||
|
# 'NoneType' object has no attribute 'opcode'
|
||||||
|
raise WebSocketProtocolException("Not a valid frame %s" % frame)
|
||||||
|
elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT):
|
||||||
|
self.cont_frame.validate(frame)
|
||||||
|
self.cont_frame.add(frame)
|
||||||
|
|
||||||
|
if self.cont_frame.is_fire(frame):
|
||||||
|
return self.cont_frame.extract(frame)
|
||||||
|
|
||||||
|
elif frame.opcode == ABNF.OPCODE_CLOSE:
|
||||||
|
self.send_close()
|
||||||
|
return (frame.opcode, frame)
|
||||||
|
elif frame.opcode == ABNF.OPCODE_PING:
|
||||||
|
if len(frame.data) < 126:
|
||||||
|
self.pong(frame.data)
|
||||||
|
else:
|
||||||
|
raise WebSocketProtocolException("Ping message is too long")
|
||||||
|
if control_frame:
|
||||||
|
return (frame.opcode, frame)
|
||||||
|
elif frame.opcode == ABNF.OPCODE_PONG:
|
||||||
|
if control_frame:
|
||||||
|
return (frame.opcode, frame)
|
||||||
|
|
||||||
|
def recv_frame(self):
|
||||||
|
"""
|
||||||
|
Receive data as a frame from the server.
|
||||||
|
|
||||||
|
return value: ABNF frame object.
|
||||||
|
"""
|
||||||
|
return self.frame_buffer.recv_frame()
|
||||||
|
|
||||||
|
def send_close(self, status=STATUS_NORMAL, reason=six.b("")):
|
||||||
|
"""
|
||||||
|
send close data to the server.
|
||||||
|
|
||||||
|
status: status code to send. see STATUS_XXX.
|
||||||
|
|
||||||
|
reason: the reason to close. This must be string or bytes.
|
||||||
|
"""
|
||||||
|
if status < 0 or status >= ABNF.LENGTH_16:
|
||||||
|
raise ValueError("code is invalid range")
|
||||||
|
self.connected = False
|
||||||
|
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
|
||||||
|
|
||||||
|
def close(self, status=STATUS_NORMAL, reason=six.b("")):
|
||||||
|
"""
|
||||||
|
Close Websocket object
|
||||||
|
|
||||||
|
status: status code to send. see STATUS_XXX.
|
||||||
|
|
||||||
|
reason: the reason to close. This must be string.
|
||||||
|
"""
|
||||||
|
if self.connected:
|
||||||
|
if status < 0 or status >= ABNF.LENGTH_16:
|
||||||
|
raise ValueError("code is invalid range")
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.connected = False
|
||||||
|
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
|
||||||
|
timeout = self.sock.gettimeout()
|
||||||
|
self.sock.settimeout(3)
|
||||||
|
try:
|
||||||
|
frame = self.recv_frame()
|
||||||
|
if isEnabledForError():
|
||||||
|
recv_status = struct.unpack("!H", frame.data)[0]
|
||||||
|
if recv_status != STATUS_NORMAL:
|
||||||
|
error("close status: " + repr(recv_status))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
self.sock.settimeout(timeout)
|
||||||
|
self.sock.shutdown(socket.SHUT_RDWR)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.shutdown()
|
||||||
|
|
||||||
|
def abort(self):
|
||||||
|
"""
|
||||||
|
Low-level asynchronous abort; wakes up other threads that are waiting in recv_*.
|
||||||
|
"""
|
||||||
|
if self.connected:
|
||||||
|
self.sock.shutdown(socket.SHUT_RDWR)
|
||||||
|
|
||||||
|
def shutdown(self):
|
||||||
|
"close socket, immediately."
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
self.sock = None
|
||||||
|
self.connected = False
|
||||||
|
|
||||||
|
def _send(self, data):
|
||||||
|
return send(self.sock, data)
|
||||||
|
|
||||||
|
def _recv(self, bufsize):
|
||||||
|
try:
|
||||||
|
return recv(self.sock, bufsize)
|
||||||
|
except WebSocketConnectionClosedException:
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
self.sock = None
|
||||||
|
self.connected = False
|
||||||
|
raise
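# Illustrative sketch, not part of the upstream library: a receive loop built on
# recv_data() so the caller sees both the opcode and the payload. "handler" is a
# hypothetical callback supplied by the caller.
def _example_recv_loop(conn, handler):
    while conn.connected:
        opcode, data = conn.recv_data()
        if opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
            handler(opcode, data)
        elif opcode == ABNF.OPCODE_CLOSE:
            break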
|
65
lib/websocket/_exceptions.py
Normal file
65
lib/websocket/_exceptions.py
Normal file
|
@ -0,0 +1,65 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
define websocket exceptions
|
||||||
|
"""
|
||||||
|
|
||||||
|
class WebSocketException(Exception):
|
||||||
|
"""
|
||||||
|
websocket exception class.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketProtocolException(WebSocketException):
|
||||||
|
"""
|
||||||
|
If the websocket protocol is invalid, this exception will be raised.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketPayloadException(WebSocketException):
|
||||||
|
"""
|
||||||
|
If the websocket payload is invalid, this exception will be raised.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketConnectionClosedException(WebSocketException):
|
||||||
|
"""
|
||||||
|
If the remote host closed the connection or a network error occurred,
|
||||||
|
this exception will be raised.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketTimeoutException(WebSocketException):
|
||||||
|
"""
|
||||||
|
WebSocketTimeoutException will be raised on a socket timeout during read/write operations.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketProxyException(WebSocketException):
|
||||||
|
"""
|
||||||
|
WebSocketProxyException will be raised when a proxy error occurs.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
155
lib/websocket/_handshake.py
Normal file
155
lib/websocket/_handshake.py
Normal file
|
@ -0,0 +1,155 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import six
|
||||||
|
if six.PY3:
|
||||||
|
from base64 import encodebytes as base64encode
|
||||||
|
else:
|
||||||
|
from base64 import encodestring as base64encode
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
from ._logging import *
|
||||||
|
from ._url import *
|
||||||
|
from ._socket import *
|
||||||
|
from ._http import *
|
||||||
|
from ._exceptions import *
|
||||||
|
|
||||||
|
__all__ = ["handshake_response", "handshake"]
|
||||||
|
|
||||||
|
# websocket supported version.
|
||||||
|
VERSION = 13
|
||||||
|
|
||||||
|
|
||||||
|
class handshake_response(object):
|
||||||
|
def __init__(self, status, headers, subprotocol):
|
||||||
|
self.status = status
|
||||||
|
self.headers = headers
|
||||||
|
self.subprotocol = subprotocol
|
||||||
|
|
||||||
|
|
||||||
|
def handshake(sock, hostname, port, resource, **options):
|
||||||
|
headers, key = _get_handshake_headers(resource, hostname, port, options)
|
||||||
|
|
||||||
|
header_str = "\r\n".join(headers)
|
||||||
|
send(sock, header_str)
|
||||||
|
dump("request header", header_str)
|
||||||
|
|
||||||
|
status, resp = _get_resp_headers(sock)
|
||||||
|
success, subproto = _validate(resp, key, options.get("subprotocols"))
|
||||||
|
if not success:
|
||||||
|
raise WebSocketException("Invalid WebSocket Header")
|
||||||
|
|
||||||
|
return handshake_response(status, resp, subproto)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_handshake_headers(resource, host, port, options):
|
||||||
|
headers = []
|
||||||
|
headers.append("GET %s HTTP/1.1" % resource)
|
||||||
|
headers.append("Upgrade: websocket")
|
||||||
|
headers.append("Connection: Upgrade")
|
||||||
|
if port == 80:
|
||||||
|
hostport = host
|
||||||
|
else:
|
||||||
|
hostport = "%s:%d" % (host, port)
|
||||||
|
|
||||||
|
if "host" in options and options["host"]:
|
||||||
|
headers.append("Host: %s" % options["host"])
|
||||||
|
else:
|
||||||
|
headers.append("Host: %s" % hostport)
|
||||||
|
|
||||||
|
if "origin" in options and options["origin"]:
|
||||||
|
headers.append("Origin: %s" % options["origin"])
|
||||||
|
else:
|
||||||
|
headers.append("Origin: http://%s" % hostport)
|
||||||
|
|
||||||
|
key = _create_sec_websocket_key()
|
||||||
|
headers.append("Sec-WebSocket-Key: %s" % key)
|
||||||
|
headers.append("Sec-WebSocket-Version: %s" % VERSION)
|
||||||
|
|
||||||
|
subprotocols = options.get("subprotocols")
|
||||||
|
if subprotocols:
|
||||||
|
headers.append("Sec-WebSocket-Protocol: %s" % ",".join(subprotocols))
|
||||||
|
|
||||||
|
if "header" in options:
|
||||||
|
headers.extend(options["header"])
|
||||||
|
|
||||||
|
cookie = options.get("cookie", None)
|
||||||
|
|
||||||
|
if cookie:
|
||||||
|
headers.append("Cookie: %s" % cookie)
|
||||||
|
|
||||||
|
headers.append("")
|
||||||
|
headers.append("")
|
||||||
|
|
||||||
|
return headers, key
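# For reference, the header list built above produces a request of roughly this
# shape once joined with CRLF (the key is random per connection; the values shown
# here are illustrative only):
#
#   GET /chat HTTP/1.1
#   Upgrade: websocket
#   Connection: Upgrade
#   Host: example.com:8080
#   Origin: http://example.com:8080
#   Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==
#   Sec-WebSocket-Version: 13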
|
||||||
|
|
||||||
|
|
||||||
|
def _get_resp_headers(sock, success_status=101):
|
||||||
|
status, resp_headers = read_headers(sock)
|
||||||
|
if status != success_status:
|
||||||
|
raise WebSocketException("Handshake status %d" % status)
|
||||||
|
return status, resp_headers
|
||||||
|
|
||||||
|
_HEADERS_TO_CHECK = {
|
||||||
|
"upgrade": "websocket",
|
||||||
|
"connection": "upgrade",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _validate(headers, key, subprotocols):
|
||||||
|
subproto = None
|
||||||
|
for k, v in _HEADERS_TO_CHECK.items():
|
||||||
|
r = headers.get(k, None)
|
||||||
|
if not r:
|
||||||
|
return False, None
|
||||||
|
r = r.lower()
|
||||||
|
if v != r:
|
||||||
|
return False, None
|
||||||
|
|
||||||
|
if subprotocols:
|
||||||
|
subproto = headers.get("sec-websocket-protocol", None).lower()
|
||||||
|
if not subproto or subproto not in [s.lower() for s in subprotocols]:
|
||||||
|
error("Invalid subprotocol: " + str(subprotocols))
|
||||||
|
return False, None
|
||||||
|
|
||||||
|
result = headers.get("sec-websocket-accept", None)
|
||||||
|
if not result:
|
||||||
|
return False, None
|
||||||
|
result = result.lower()
|
||||||
|
|
||||||
|
if isinstance(result, six.text_type):
|
||||||
|
result = result.encode('utf-8')
|
||||||
|
|
||||||
|
value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8')
|
||||||
|
hashed = base64encode(hashlib.sha1(value).digest()).strip().lower()
|
||||||
|
success = (hashed == result)
|
||||||
|
if success:
|
||||||
|
return True, subproto
|
||||||
|
else:
|
||||||
|
return False, None
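# Illustrative sketch, not part of the upstream library: the accept value that
# _validate() recomputes above, shown standalone. For the sample key from
# RFC 6455, "dGhlIHNhbXBsZSBub25jZQ==", this returns
# b"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=".
def _example_expected_accept(key):
    value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8')
    return base64encode(hashlib.sha1(value).digest()).strip()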
|
||||||
|
|
||||||
|
|
||||||
|
def _create_sec_websocket_key():
|
||||||
|
uid = uuid.uuid4()
|
||||||
|
return base64encode(uid.bytes).decode('utf-8').strip()
|
215
lib/websocket/_http.py
Normal file
215
lib/websocket/_http.py
Normal file
|
@ -0,0 +1,215 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import six
|
||||||
|
import socket
|
||||||
|
import errno
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
from base64 import encodebytes as base64encode
|
||||||
|
else:
|
||||||
|
from base64 import encodestring as base64encode
|
||||||
|
|
||||||
|
from ._logging import *
|
||||||
|
from ._url import *
|
||||||
|
from ._socket import *
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._ssl_compat import *
|
||||||
|
|
||||||
|
__all__ = ["proxy_info", "connect", "read_headers"]
|
||||||
|
|
||||||
|
class proxy_info(object):
|
||||||
|
def __init__(self, **options):
|
||||||
|
self.host = options.get("http_proxy_host", None)
|
||||||
|
if self.host:
|
||||||
|
self.port = options.get("http_proxy_port", 0)
|
||||||
|
self.auth = options.get("http_proxy_auth", None)
|
||||||
|
self.no_proxy = options.get("http_no_proxy", None)
|
||||||
|
else:
|
||||||
|
self.port = 0
|
||||||
|
self.auth = None
|
||||||
|
self.no_proxy = None
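# Illustrative sketch, not part of the upstream library: proxy_info simply
# repackages the proxy-related keyword options that connect()/create_connection()
# accept. The host, port and credentials below are hypothetical placeholders.
def _example_proxy_info():
    info = proxy_info(http_proxy_host="proxy.local",
                      http_proxy_port=3128,
                      http_proxy_auth=("user", "secret"))
    return info.host, info.port, info.auth  # ("proxy.local", 3128, ("user", "secret"))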
|
||||||
|
|
||||||
|
def connect(url, options, proxy):
|
||||||
|
hostname, port, resource, is_secure = parse_url(url)
|
||||||
|
addrinfo_list, need_tunnel, auth = _get_addrinfo_list(hostname, port, is_secure, proxy)
|
||||||
|
if not addrinfo_list:
|
||||||
|
raise WebSocketException(
|
||||||
|
"Host not found.: " + hostname + ":" + str(port))
|
||||||
|
|
||||||
|
sock = None
|
||||||
|
try:
|
||||||
|
sock = _open_socket(addrinfo_list, options.sockopt, options.timeout)
|
||||||
|
if need_tunnel:
|
||||||
|
sock = _tunnel(sock, hostname, port, auth)
|
||||||
|
|
||||||
|
if is_secure:
|
||||||
|
if HAVE_SSL:
|
||||||
|
sock = _ssl_socket(sock, options.sslopt, hostname)
|
||||||
|
else:
|
||||||
|
raise WebSocketException("SSL not available.")
|
||||||
|
|
||||||
|
return sock, (hostname, port, resource)
|
||||||
|
except:
|
||||||
|
if sock:
|
||||||
|
sock.close()
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
def _get_addrinfo_list(hostname, port, is_secure, proxy):
|
||||||
|
phost, pport, pauth = get_proxy_info(hostname, is_secure,
|
||||||
|
proxy.host, proxy.port, proxy.auth, proxy.no_proxy)
|
||||||
|
if not phost:
|
||||||
|
addrinfo_list = socket.getaddrinfo(hostname, port, 0, 0, socket.SOL_TCP)
|
||||||
|
return addrinfo_list, False, None
|
||||||
|
else:
|
||||||
|
pport = pport and pport or 80
|
||||||
|
addrinfo_list = socket.getaddrinfo(phost, pport, 0, 0, socket.SOL_TCP)
|
||||||
|
return addrinfo_list, True, pauth
|
||||||
|
|
||||||
|
|
||||||
|
def _open_socket(addrinfo_list, sockopt, timeout):
|
||||||
|
err = None
|
||||||
|
for addrinfo in addrinfo_list:
|
||||||
|
family = addrinfo[0]
|
||||||
|
sock = socket.socket(family)
|
||||||
|
sock.settimeout(timeout)
|
||||||
|
for opts in DEFAULT_SOCKET_OPTION:
|
||||||
|
sock.setsockopt(*opts)
|
||||||
|
for opts in sockopt:
|
||||||
|
sock.setsockopt(*opts)
|
||||||
|
|
||||||
|
address = addrinfo[4]
|
||||||
|
try:
|
||||||
|
sock.connect(address)
|
||||||
|
except socket.error as error:
|
||||||
|
error.remote_ip = str(address[0])
|
||||||
|
if error.errno in (errno.ECONNREFUSED, ):
|
||||||
|
err = error
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
raise err
|
||||||
|
|
||||||
|
return sock
|
||||||
|
|
||||||
|
|
||||||
|
def _can_use_sni():
|
||||||
|
return (six.PY2 and sys.version_info >= (2, 7, 9)) or (six.PY3 and sys.version_info >= (3, 2))
|
||||||
|
|
||||||
|
|
||||||
|
def _wrap_sni_socket(sock, sslopt, hostname, check_hostname):
|
||||||
|
context = ssl.SSLContext(sslopt.get('ssl_version', ssl.PROTOCOL_SSLv23))
|
||||||
|
|
||||||
|
context.load_verify_locations(cafile=sslopt.get('ca_certs', None))
|
||||||
|
# see https://github.com/liris/websocket-client/commit/b96a2e8fa765753e82eea531adb19716b52ca3ca#commitcomment-10803153
|
||||||
|
context.verify_mode = sslopt['cert_reqs']
|
||||||
|
if HAVE_CONTEXT_CHECK_HOSTNAME:
|
||||||
|
context.check_hostname = check_hostname
|
||||||
|
if 'ciphers' in sslopt:
|
||||||
|
context.set_ciphers(sslopt['ciphers'])
|
||||||
|
|
||||||
|
return context.wrap_socket(
|
||||||
|
sock,
|
||||||
|
do_handshake_on_connect=sslopt.get('do_handshake_on_connect', True),
|
||||||
|
suppress_ragged_eofs=sslopt.get('suppress_ragged_eofs', True),
|
||||||
|
server_hostname=hostname,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _ssl_socket(sock, user_sslopt, hostname):
|
||||||
|
sslopt = dict(cert_reqs=ssl.CERT_REQUIRED)
|
||||||
|
certPath = os.path.join(
|
||||||
|
os.path.dirname(__file__), "cacert.pem")
|
||||||
|
if os.path.isfile(certPath):
|
||||||
|
sslopt['ca_certs'] = certPath
|
||||||
|
sslopt.update(user_sslopt)
|
||||||
|
check_hostname = sslopt["cert_reqs"] != ssl.CERT_NONE and sslopt.pop('check_hostname', True)
|
||||||
|
|
||||||
|
if _can_use_sni():
|
||||||
|
sock = _wrap_sni_socket(sock, sslopt, hostname, check_hostname)
|
||||||
|
else:
|
||||||
|
sslopt.pop('check_hostname', True)
|
||||||
|
sock = ssl.wrap_socket(sock, **sslopt)
|
||||||
|
|
||||||
|
if not HAVE_CONTEXT_CHECK_HOSTNAME and check_hostname:
|
||||||
|
match_hostname(sock.getpeercert(), hostname)
|
||||||
|
|
||||||
|
return sock
|
||||||
|
|
||||||
|
def _tunnel(sock, host, port, auth):
|
||||||
|
debug("Connecting proxy...")
|
||||||
|
connect_header = "CONNECT %s:%d HTTP/1.0\r\n" % (host, port)
|
||||||
|
# TODO: support digest auth.
|
||||||
|
if auth and auth[0]:
|
||||||
|
auth_str = auth[0]
|
||||||
|
if auth[1]:
|
||||||
|
auth_str += ":" + auth[1]
|
||||||
|
encoded_str = base64encode(auth_str.encode()).strip().decode()
|
||||||
|
connect_header += "Proxy-Authorization: Basic %s\r\n" % encoded_str
|
||||||
|
connect_header += "\r\n"
|
||||||
|
dump("request header", connect_header)
|
||||||
|
|
||||||
|
send(sock, connect_header)
|
||||||
|
|
||||||
|
try:
|
||||||
|
status, resp_headers = read_headers(sock)
|
||||||
|
except Exception as e:
|
||||||
|
raise WebSocketProxyException(str(e))
|
||||||
|
|
||||||
|
if status != 200:
|
||||||
|
raise WebSocketProxyException(
|
||||||
|
"failed CONNECT via proxy status: %r" + status)
|
||||||
|
|
||||||
|
return sock
|
||||||
|
|
||||||
|
def read_headers(sock):
|
||||||
|
status = None
|
||||||
|
headers = {}
|
||||||
|
trace("--- response header ---")
|
||||||
|
|
||||||
|
while True:
|
||||||
|
line = recv_line(sock)
|
||||||
|
line = line.decode('utf-8').strip()
|
||||||
|
if not line:
|
||||||
|
break
|
||||||
|
trace(line)
|
||||||
|
if not status:
|
||||||
|
|
||||||
|
status_info = line.split(" ", 2)
|
||||||
|
status = int(status_info[1])
|
||||||
|
else:
|
||||||
|
kv = line.split(":", 1)
|
||||||
|
if len(kv) == 2:
|
||||||
|
key, value = kv
|
||||||
|
headers[key.lower()] = value.strip().lower()
|
||||||
|
else:
|
||||||
|
raise WebSocketException("Invalid header")
|
||||||
|
|
||||||
|
trace("-----------------------")
|
||||||
|
|
||||||
|
return status, headers
|
71
lib/websocket/_logging.py
Normal file
71
lib/websocket/_logging.py
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
_logger = logging.getLogger()
|
||||||
|
_traceEnabled = False
|
||||||
|
|
||||||
|
__all__ = ["enableTrace", "dump", "error", "debug", "trace",
|
||||||
|
"isEnabledForError", "isEnabledForDebug"]
|
||||||
|
|
||||||
|
|
||||||
|
def enableTrace(tracable):
|
||||||
|
"""
|
||||||
|
Turn trace output on or off.
|
||||||
|
|
||||||
|
tracable: boolean value. If set to True, trace output is enabled.
|
||||||
|
"""
|
||||||
|
global _traceEnabled
|
||||||
|
_traceEnabled = tracable
|
||||||
|
if tracable:
|
||||||
|
if not _logger.handlers:
|
||||||
|
_logger.addHandler(logging.StreamHandler())
|
||||||
|
_logger.setLevel(logging.DEBUG)
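# Illustrative note, not part of the upstream library: tracing is normally turned
# on once, before any connection is opened, e.g.
#
#     import websocket
#     websocket.enableTrace(True)
#
# after which handshake headers and frame data are echoed through this logger.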
|
||||||
|
|
||||||
|
|
||||||
|
def dump(title, message):
|
||||||
|
if _traceEnabled:
|
||||||
|
_logger.debug("--- " + title + " ---")
|
||||||
|
_logger.debug(message)
|
||||||
|
_logger.debug("-----------------------")
|
||||||
|
|
||||||
|
|
||||||
|
def error(msg):
|
||||||
|
_logger.error(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def debug(msg):
|
||||||
|
_logger.debug(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def trace(msg):
|
||||||
|
if _traceEnabled:
|
||||||
|
_logger.debug(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def isEnabledForError():
|
||||||
|
return _logger.isEnabledFor(logging.ERROR)
|
||||||
|
|
||||||
|
|
||||||
|
def isEnabledForDebug():
|
||||||
|
return _logger.isEnabledFor(logging.DEBUG)
|
121
lib/websocket/_socket.py
Normal file
121
lib/websocket/_socket.py
Normal file
|
@ -0,0 +1,121 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import socket
|
||||||
|
import six
|
||||||
|
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._utils import *
|
||||||
|
from ._ssl_compat import *
|
||||||
|
|
||||||
|
DEFAULT_SOCKET_OPTION = [(socket.SOL_TCP, socket.TCP_NODELAY, 1)]
|
||||||
|
if hasattr(socket, "SO_KEEPALIVE"):
|
||||||
|
DEFAULT_SOCKET_OPTION.append((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1))
|
||||||
|
if hasattr(socket, "TCP_KEEPIDLE"):
|
||||||
|
DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPIDLE, 30))
|
||||||
|
if hasattr(socket, "TCP_KEEPINTVL"):
|
||||||
|
DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPINTVL, 10))
|
||||||
|
if hasattr(socket, "TCP_KEEPCNT"):
|
||||||
|
DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPCNT, 3))
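# Note, not part of the upstream library: these defaults are applied to every
# socket the library opens (see _open_socket() in _http.py) before any
# user-supplied tuples, so callers can layer extra options on top via the
# "sockopt" argument of create_connection(), e.g.
# sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),).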
|
||||||
|
|
||||||
|
_default_timeout = None
|
||||||
|
|
||||||
|
__all__ = ["DEFAULT_SOCKET_OPTION", "sock_opt", "setdefaulttimeout", "getdefaulttimeout",
|
||||||
|
"recv", "recv_line", "send"]
|
||||||
|
|
||||||
|
class sock_opt(object):
|
||||||
|
def __init__(self, sockopt, sslopt):
|
||||||
|
if sockopt is None:
|
||||||
|
sockopt = []
|
||||||
|
if sslopt is None:
|
||||||
|
sslopt = {}
|
||||||
|
self.sockopt = sockopt
|
||||||
|
self.sslopt = sslopt
|
||||||
|
self.timeout = None
|
||||||
|
|
||||||
|
def setdefaulttimeout(timeout):
|
||||||
|
"""
|
||||||
|
Set the global timeout setting to connect.
|
||||||
|
|
||||||
|
timeout: default socket timeout time, in seconds.
|
||||||
|
"""
|
||||||
|
global _default_timeout
|
||||||
|
_default_timeout = timeout
|
||||||
|
|
||||||
|
|
||||||
|
def getdefaulttimeout():
|
||||||
|
"""
|
||||||
|
Return the global timeout setting (seconds) used to connect.
|
||||||
|
"""
|
||||||
|
return _default_timeout
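# Illustrative sketch, not part of the upstream library: the module-level default
# only affects connections created after it is set.
def _example_global_timeout(seconds=30):
    setdefaulttimeout(seconds)
    return getdefaulttimeout()  # -> 30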
|
||||||
|
|
||||||
|
|
||||||
|
def recv(sock, bufsize):
|
||||||
|
if not sock:
|
||||||
|
raise WebSocketConnectionClosedException("socket is already closed.")
|
||||||
|
|
||||||
|
try:
|
||||||
|
bytes = sock.recv(bufsize)
|
||||||
|
except socket.timeout as e:
|
||||||
|
message = extract_err_message(e)
|
||||||
|
raise WebSocketTimeoutException(message)
|
||||||
|
except SSLError as e:
|
||||||
|
message = extract_err_message(e)
|
||||||
|
if message == "The read operation timed out":
|
||||||
|
raise WebSocketTimeoutException(message)
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
if not bytes:
|
||||||
|
raise WebSocketConnectionClosedException("Connection is already closed.")
|
||||||
|
|
||||||
|
return bytes
|
||||||
|
|
||||||
|
|
||||||
|
def recv_line(sock):
|
||||||
|
line = []
|
||||||
|
while True:
|
||||||
|
c = recv(sock, 1)
|
||||||
|
line.append(c)
|
||||||
|
if c == six.b("\n"):
|
||||||
|
break
|
||||||
|
return six.b("").join(line)
|
||||||
|
|
||||||
|
|
||||||
|
def send(sock, data):
|
||||||
|
if isinstance(data, six.text_type):
|
||||||
|
data = data.encode('utf-8')
|
||||||
|
|
||||||
|
if not sock:
|
||||||
|
raise WebSocketConnectionClosedException("socket is already closed.")
|
||||||
|
|
||||||
|
try:
|
||||||
|
return sock.send(data)
|
||||||
|
except socket.timeout as e:
|
||||||
|
message = extract_err_message(e)
|
||||||
|
raise WebSocketTimeoutException(message)
|
||||||
|
except Exception as e:
|
||||||
|
message = extract_err_message(e)
|
||||||
|
if message and "timed out" in message:
|
||||||
|
raise WebSocketTimeoutException(message)
|
||||||
|
else:
|
||||||
|
raise
|
45
lib/websocket/_ssl_compat.py
Normal file
45
lib/websocket/_ssl_compat.py
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
__all__ = ["HAVE_SSL", "ssl", "SSLError"]
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ssl
|
||||||
|
from ssl import SSLError
|
||||||
|
if hasattr(ssl, 'SSLContext') and hasattr(ssl.SSLContext, 'check_hostname'):
|
||||||
|
HAVE_CONTEXT_CHECK_HOSTNAME = True
|
||||||
|
else:
|
||||||
|
HAVE_CONTEXT_CHECK_HOSTNAME = False
|
||||||
|
if hasattr(ssl, "match_hostname"):
|
||||||
|
from ssl import match_hostname
|
||||||
|
else:
|
||||||
|
from backports.ssl_match_hostname import match_hostname
|
||||||
|
__all__.append("match_hostname")
|
||||||
|
__all__.append("HAVE_CONTEXT_CHECK_HOSTNAME")
|
||||||
|
|
||||||
|
HAVE_SSL = True
|
||||||
|
except ImportError:
|
||||||
|
# dummy SSLError class for environments without ssl support.
|
||||||
|
class SSLError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
HAVE_SSL = False
|
126
lib/websocket/_url.py
Normal file
126
lib/websocket/_url.py
Normal file
|
@ -0,0 +1,126 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from six.moves.urllib.parse import urlparse
|
||||||
|
import os
|
||||||
|
|
||||||
|
__all__ = ["parse_url", "get_proxy_info"]
|
||||||
|
|
||||||
|
|
||||||
|
def parse_url(url):
|
||||||
|
"""
|
||||||
|
Parse a url and return a tuple of
|
||||||
|
(hostname, port, resource path, secure-mode flag).
|
||||||
|
|
||||||
|
url: url string.
|
||||||
|
"""
|
||||||
|
if ":" not in url:
|
||||||
|
raise ValueError("url is invalid")
|
||||||
|
|
||||||
|
scheme, url = url.split(":", 1)
|
||||||
|
|
||||||
|
parsed = urlparse(url, scheme="ws")
|
||||||
|
if parsed.hostname:
|
||||||
|
hostname = parsed.hostname
|
||||||
|
else:
|
||||||
|
raise ValueError("hostname is invalid")
|
||||||
|
port = 0
|
||||||
|
if parsed.port:
|
||||||
|
port = parsed.port
|
||||||
|
|
||||||
|
is_secure = False
|
||||||
|
if scheme == "ws":
|
||||||
|
if not port:
|
||||||
|
port = 80
|
||||||
|
elif scheme == "wss":
|
||||||
|
is_secure = True
|
||||||
|
if not port:
|
||||||
|
port = 443
|
||||||
|
else:
|
||||||
|
raise ValueError("scheme %s is invalid" % scheme)
|
||||||
|
|
||||||
|
if parsed.path:
|
||||||
|
resource = parsed.path
|
||||||
|
else:
|
||||||
|
resource = "/"
|
||||||
|
|
||||||
|
if parsed.query:
|
||||||
|
resource += "?" + parsed.query
|
||||||
|
|
||||||
|
return (hostname, port, resource, is_secure)
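# Illustrative sketch, not part of the upstream library: a couple of inputs and
# the tuples parse_url() returns for them (host names are placeholders).
def _example_parse_url():
    assert parse_url("ws://example.com/chat") == ("example.com", 80, "/chat", False)
    assert parse_url("wss://example.com:8443/chat?x=1") == ("example.com", 8443, "/chat?x=1", True)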
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_NO_PROXY_HOST = ["localhost", "127.0.0.1"]
|
||||||
|
|
||||||
|
|
||||||
|
def _is_no_proxy_host(hostname, no_proxy):
|
||||||
|
if not no_proxy:
|
||||||
|
v = os.environ.get("no_proxy", "").replace(" ", "")
|
||||||
|
no_proxy = v.split(",")
|
||||||
|
if not no_proxy:
|
||||||
|
no_proxy = DEFAULT_NO_PROXY_HOST
|
||||||
|
|
||||||
|
return hostname in no_proxy
|
||||||
|
|
||||||
|
|
||||||
|
def get_proxy_info(hostname, is_secure,
|
||||||
|
proxy_host=None, proxy_port=0, proxy_auth=None, no_proxy=None):
|
||||||
|
"""
|
||||||
|
Try to retrieve proxy host and port from the environment
|
||||||
|
if not provided in options.
|
||||||
|
result is (proxy_host, proxy_port, proxy_auth).
|
||||||
|
proxy_auth is tuple of username and password
|
||||||
|
of proxy authentication information.
|
||||||
|
|
||||||
|
hostname: websocket server name.
|
||||||
|
|
||||||
|
is_secure: is the connection secure? (wss)
|
||||||
|
looks for "https_proxy" in env
|
||||||
|
before falling back to "http_proxy"
|
||||||
|
|
||||||
|
options: "http_proxy_host" - http proxy host name.
|
||||||
|
"http_proxy_port" - http proxy port.
|
||||||
|
"http_no_proxy" - host names, which doesn't use proxy.
|
||||||
|
"http_proxy_auth" - http proxy auth infomation.
|
||||||
|
tuple of username and password.
|
||||||
|
default is None
|
||||||
|
"""
|
||||||
|
if _is_no_proxy_host(hostname, no_proxy):
|
||||||
|
return None, 0, None
|
||||||
|
|
||||||
|
if proxy_host:
|
||||||
|
port = proxy_port
|
||||||
|
auth = proxy_auth
|
||||||
|
return proxy_host, port, auth
|
||||||
|
|
||||||
|
env_keys = ["http_proxy"]
|
||||||
|
if is_secure:
|
||||||
|
env_keys.insert(0, "https_proxy")
|
||||||
|
|
||||||
|
for key in env_keys:
|
||||||
|
value = os.environ.get(key, None)
|
||||||
|
if value:
|
||||||
|
proxy = urlparse(value)
|
||||||
|
auth = (proxy.username, proxy.password) if proxy.username else None
|
||||||
|
return proxy.hostname, proxy.port, auth
|
||||||
|
|
||||||
|
return None, 0, None
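# Illustrative sketch, not part of the upstream library: an explicitly supplied
# proxy host wins over the environment variables. The host name and port are
# hypothetical placeholders.
def _example_explicit_proxy():
    return get_proxy_info("example.com", True,
                          proxy_host="proxy.local", proxy_port=3128)
    # -> ("proxy.local", 3128, None)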
|
88
lib/websocket/_utils.py
Normal file
88
lib/websocket/_utils.py
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
__all__ = ["NoLock", "validate_utf8", "extract_err_message"]
|
||||||
|
|
||||||
|
class NoLock(object):
|
||||||
|
def __enter__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __exit__(self, type, value, traceback):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
# UTF-8 validator
|
||||||
|
# python implementation of http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
|
||||||
|
|
||||||
|
UTF8_ACCEPT = 0
|
||||||
|
UTF8_REJECT = 12
|
||||||
|
|
||||||
|
_UTF8D = [
|
||||||
|
# The first part of the table maps bytes to character classes
|
||||||
|
# to reduce the size of the transition table and create bitmasks.
|
||||||
|
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
|
||||||
|
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
|
||||||
|
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
|
||||||
|
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
|
||||||
|
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,
|
||||||
|
7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
|
||||||
|
8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
|
||||||
|
10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8,
|
||||||
|
|
||||||
|
# The second part is a transition table that maps a combination
|
||||||
|
# of a state of the automaton and a character class to a state.
|
||||||
|
0,12,24,36,60,96,84,12,12,12,48,72, 12,12,12,12,12,12,12,12,12,12,12,12,
|
||||||
|
12, 0,12,12,12,12,12, 0,12, 0,12,12, 12,24,12,12,12,12,12,24,12,24,12,12,
|
||||||
|
12,12,12,12,12,12,12,24,12,12,12,12, 12,24,12,12,12,12,12,12,12,24,12,12,
|
||||||
|
12,12,12,12,12,12,12,36,12,36,12,12, 12,36,12,12,12,12,12,36,12,36,12,12,
|
||||||
|
12,36,12,12,12,12,12,12,12,12,12,12, ]
|
||||||
|
|
||||||
|
def _decode(state, codep, ch):
|
||||||
|
tp = _UTF8D[ch]
|
||||||
|
|
||||||
|
codep = (ch & 0x3f) | (codep << 6) if (state != UTF8_ACCEPT) else (0xff >> tp) & ch
|
||||||
|
state = _UTF8D[256 + state + tp]
|
||||||
|
|
||||||
|
return state, codep
|
||||||
|
|
||||||
|
def validate_utf8(utfbytes):
|
||||||
|
"""
|
||||||
|
validate utf8 byte string.
|
||||||
|
utfbytes: utf byte string to check.
|
||||||
|
return value: True if the byte string is valid utf8, otherwise False.
|
||||||
|
"""
|
||||||
|
state = UTF8_ACCEPT
|
||||||
|
codep = 0
|
||||||
|
for i in utfbytes:
|
||||||
|
if six.PY2:
|
||||||
|
i = ord(i)
|
||||||
|
state, codep = _decode(state, codep, i)
|
||||||
|
if state == UTF8_REJECT:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
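# Illustrative sketch, not part of the upstream library: validate_utf8() operates
# on byte strings; well-formed multi-byte sequences pass and malformed ones fail.
def _example_validate_utf8():
    assert validate_utf8(six.b("abc")) is True
    assert validate_utf8(six.b("\xf0\x90\x80\x80")) is True   # U+10000, valid 4-byte sequence
    assert validate_utf8(six.b("\xf0\x28\x8c\x28")) is False  # malformed continuation bytes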
|
||||||
|
|
||||||
|
|
||||||
|
def extract_err_message(exception):
|
||||||
|
return getattr(exception, 'strerror', str(exception))
|
4966
lib/websocket/cacert.pem
Normal file
4966
lib/websocket/cacert.pem
Normal file
File diff suppressed because it is too large
0
lib/websocket/tests/__init__.py
Normal file
0
lib/websocket/tests/__init__.py
Normal file
6
lib/websocket/tests/data/header01.txt
Normal file
6
lib/websocket/tests/data/header01.txt
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
HTTP/1.1 101 WebSocket Protocol Handshake
|
||||||
|
Connection: Upgrade
|
||||||
|
Upgrade: WebSocket
|
||||||
|
Sec-WebSocket-Accept: Kxep+hNu9n51529fGidYu7a3wO0=
|
||||||
|
some_header: something
|
||||||
|
|
6
lib/websocket/tests/data/header02.txt
Normal file
6
lib/websocket/tests/data/header02.txt
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
HTTP/1.1 101 WebSocket Protocol Handshake
|
||||||
|
Connection: Upgrade
|
||||||
|
Upgrade WebSocket
|
||||||
|
Sec-WebSocket-Accept: Kxep+hNu9n51529fGidYu7a3wO0=
|
||||||
|
some_header: something
|
||||||
|
|
660
lib/websocket/tests/test_websocket.py
Normal file
660
lib/websocket/tests/test_websocket.py
Normal file
|
@ -0,0 +1,660 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
|
||||||
|
import six
|
||||||
|
import sys
|
||||||
|
sys.path[0:0] = [""]
|
||||||
|
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import base64
|
||||||
|
import socket
|
||||||
|
try:
|
||||||
|
from ssl import SSLError
|
||||||
|
except ImportError:
|
||||||
|
# dummy class of SSLError for ssl none-support environment.
|
||||||
|
class SSLError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
|
||||||
|
import unittest2 as unittest
|
||||||
|
else:
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
from base64 import decodebytes as base64decode
|
||||||
|
else:
|
||||||
|
from base64 import decodestring as base64decode
|
||||||
|
|
||||||
|
|
||||||
|
# websocket-client
|
||||||
|
import websocket as ws
|
||||||
|
from websocket._handshake import _create_sec_websocket_key
|
||||||
|
from websocket._url import parse_url, get_proxy_info
|
||||||
|
from websocket._utils import validate_utf8
|
||||||
|
from websocket._handshake import _validate as _validate_header
|
||||||
|
from websocket._http import read_headers
|
||||||
|
|
||||||
|
|
||||||
|
# Skip tests that require internet access.
|
||||||
|
TEST_WITH_INTERNET = os.environ.get('TEST_WITH_INTERNET', '0') == '1'
|
||||||
|
|
||||||
|
# Skip Secure WebSocket test.
|
||||||
|
TEST_SECURE_WS = True
|
||||||
|
TRACABLE = False
|
||||||
|
|
||||||
|
|
||||||
|
def create_mask_key(n):
|
||||||
|
return "abcd"
|
||||||
|
|
||||||
|
|
||||||
|
class SockMock(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.data = []
|
||||||
|
self.sent = []
|
||||||
|
|
||||||
|
def add_packet(self, data):
|
||||||
|
self.data.append(data)
|
||||||
|
|
||||||
|
def recv(self, bufsize):
|
||||||
|
if self.data:
|
||||||
|
e = self.data.pop(0)
|
||||||
|
if isinstance(e, Exception):
|
||||||
|
raise e
|
||||||
|
if len(e) > bufsize:
|
||||||
|
self.data.insert(0, e[bufsize:])
|
||||||
|
return e[:bufsize]
|
||||||
|
|
||||||
|
def send(self, data):
|
||||||
|
self.sent.append(data)
|
||||||
|
return len(data)
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class HeaderSockMock(SockMock):
|
||||||
|
|
||||||
|
def __init__(self, fname):
|
||||||
|
SockMock.__init__(self)
|
||||||
|
path = os.path.join(os.path.dirname(__file__), fname)
|
||||||
|
with open(path, "rb") as f:
|
||||||
|
self.add_packet(f.read())
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketTest(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
ws.enableTrace(TRACABLE)
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def testDefaultTimeout(self):
|
||||||
|
self.assertEqual(ws.getdefaulttimeout(), None)
|
||||||
|
ws.setdefaulttimeout(10)
|
||||||
|
self.assertEqual(ws.getdefaulttimeout(), 10)
|
||||||
|
ws.setdefaulttimeout(None)
|
||||||
|
|
||||||
|
def testParseUrl(self):
|
||||||
|
p = parse_url("ws://www.example.com/r")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com/r/")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/r/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com/")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com:8080/r")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com:8080/")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com:8080")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("wss://www.example.com:8080/r")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], True)
|
||||||
|
|
||||||
|
p = parse_url("wss://www.example.com:8080/r?key=value")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r?key=value")
|
||||||
|
self.assertEqual(p[3], True)
|
||||||
|
|
||||||
|
self.assertRaises(ValueError, parse_url, "http://www.example.com/r")
|
||||||
|
|
||||||
|
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
|
||||||
|
return
|
||||||
|
|
||||||
|
p = parse_url("ws://[2a03:4000:123:83::3]/r")
|
||||||
|
self.assertEqual(p[0], "2a03:4000:123:83::3")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://[2a03:4000:123:83::3]:8080/r")
|
||||||
|
self.assertEqual(p[0], "2a03:4000:123:83::3")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("wss://[2a03:4000:123:83::3]/r")
|
||||||
|
self.assertEqual(p[0], "2a03:4000:123:83::3")
|
||||||
|
self.assertEqual(p[1], 443)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], True)
|
||||||
|
|
||||||
|
p = parse_url("wss://[2a03:4000:123:83::3]:8080/r")
|
||||||
|
self.assertEqual(p[0], "2a03:4000:123:83::3")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], True)
|
||||||
|
|
||||||
|
def testWSKey(self):
|
||||||
|
key = _create_sec_websocket_key()
|
||||||
|
self.assertTrue(key != 24)
|
||||||
|
self.assertTrue(six.u("¥n") not in key)
|
||||||
|
|
||||||
|
def testWsUtils(self):
|
||||||
|
key = "c6b8hTg4EeGb2gQMztV1/g=="
|
||||||
|
required_header = {
|
||||||
|
"upgrade": "websocket",
|
||||||
|
"connection": "upgrade",
|
||||||
|
"sec-websocket-accept": "Kxep+hNu9n51529fGidYu7a3wO0=",
|
||||||
|
}
|
||||||
|
self.assertEqual(_validate_header(required_header, key, None), (True, None))
|
||||||
|
|
||||||
|
header = required_header.copy()
|
||||||
|
header["upgrade"] = "http"
|
||||||
|
self.assertEqual(_validate_header(header, key, None), (False, None))
|
||||||
|
del header["upgrade"]
|
||||||
|
self.assertEqual(_validate_header(header, key, None), (False, None))
|
||||||
|
|
||||||
|
header = required_header.copy()
|
||||||
|
header["connection"] = "something"
|
||||||
|
self.assertEqual(_validate_header(header, key, None), (False, None))
|
||||||
|
del header["connection"]
|
||||||
|
self.assertEqual(_validate_header(header, key, None), (False, None))
|
||||||
|
|
||||||
|
header = required_header.copy()
|
||||||
|
header["sec-websocket-accept"] = "something"
|
||||||
|
self.assertEqual(_validate_header(header, key, None), (False, None))
|
||||||
|
del header["sec-websocket-accept"]
|
||||||
|
self.assertEqual(_validate_header(header, key, None), (False, None))
|
||||||
|
|
||||||
|
header = required_header.copy()
|
||||||
|
header["sec-websocket-protocol"] = "sub1"
|
||||||
|
self.assertEqual(_validate_header(header, key, ["sub1", "sub2"]), (True, "sub1"))
|
||||||
|
self.assertEqual(_validate_header(header, key, ["sub2", "sub3"]), (False, None))
|
||||||
|
|
||||||
|
header = required_header.copy()
|
||||||
|
header["sec-websocket-protocol"] = "sUb1"
|
||||||
|
self.assertEqual(_validate_header(header, key, ["Sub1", "suB2"]), (True, "sub1"))
|
||||||
|
|
||||||
|
|
||||||
|
def testReadHeader(self):
|
||||||
|
status, header = read_headers(HeaderSockMock("data/header01.txt"))
|
||||||
|
self.assertEqual(status, 101)
|
||||||
|
self.assertEqual(header["connection"], "upgrade")
|
||||||
|
|
||||||
|
HeaderSockMock("data/header02.txt")
|
||||||
|
self.assertRaises(ws.WebSocketException, read_headers, HeaderSockMock("data/header02.txt"))
|
||||||
|
|
||||||
|
def testSend(self):
|
||||||
|
# TODO: add longer frame data
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
sock.set_mask_key(create_mask_key)
|
||||||
|
s = sock.sock = HeaderSockMock("data/header01.txt")
|
||||||
|
sock.send("Hello")
|
||||||
|
self.assertEqual(s.sent[0], six.b("\x81\x85abcd)\x07\x0f\x08\x0e"))
|
||||||
|
|
||||||
|
sock.send("こんにちは")
|
||||||
|
self.assertEqual(s.sent[1], six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc"))
|
||||||
|
|
||||||
|
sock.send(u"こんにちは")
|
||||||
|
self.assertEqual(s.sent[1], six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc"))
|
||||||
|
|
||||||
|
sock.send("x" * 127)
|
||||||
|
|
||||||
|
def testRecv(self):
|
||||||
|
# TODO: add longer frame data
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
something = six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc")
|
||||||
|
s.add_packet(something)
|
||||||
|
data = sock.recv()
|
||||||
|
self.assertEqual(data, "こんにちは")
|
||||||
|
|
||||||
|
s.add_packet(six.b("\x81\x85abcd)\x07\x0f\x08\x0e"))
|
||||||
|
data = sock.recv()
|
||||||
|
self.assertEqual(data, "Hello")
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testIter(self):
|
||||||
|
count = 2
|
||||||
|
for rsvp in ws.create_connection('ws://stream.meetup.com/2/rsvps'):
|
||||||
|
count -= 1
|
||||||
|
if count == 0:
|
||||||
|
break
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testNext(self):
|
||||||
|
sock = ws.create_connection('ws://stream.meetup.com/2/rsvps')
|
||||||
|
self.assertEqual(str, type(next(sock)))
|
||||||
|
|
||||||
|
def testInternalRecvStrict(self):
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
s.add_packet(six.b("foo"))
|
||||||
|
s.add_packet(socket.timeout())
|
||||||
|
s.add_packet(six.b("bar"))
|
||||||
|
# s.add_packet(SSLError("The read operation timed out"))
|
||||||
|
s.add_packet(six.b("baz"))
|
||||||
|
with self.assertRaises(ws.WebSocketTimeoutException):
|
||||||
|
data = sock.frame_buffer.recv_strict(9)
|
||||||
|
# if six.PY2:
|
||||||
|
# with self.assertRaises(ws.WebSocketTimeoutException):
|
||||||
|
# data = sock._recv_strict(9)
|
||||||
|
# else:
|
||||||
|
# with self.assertRaises(SSLError):
|
||||||
|
# data = sock._recv_strict(9)
|
||||||
|
data = sock.frame_buffer.recv_strict(9)
|
||||||
|
self.assertEqual(data, six.b("foobarbaz"))
|
||||||
|
with self.assertRaises(ws.WebSocketConnectionClosedException):
|
||||||
|
data = sock.frame_buffer.recv_strict(1)
|
||||||
|
|
||||||
|
def testRecvTimeout(self):
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
s.add_packet(six.b("\x81"))
|
||||||
|
s.add_packet(socket.timeout())
|
||||||
|
s.add_packet(six.b("\x8dabcd\x29\x07\x0f\x08\x0e"))
|
||||||
|
s.add_packet(socket.timeout())
|
||||||
|
s.add_packet(six.b("\x4e\x43\x33\x0e\x10\x0f\x00\x40"))
|
||||||
|
with self.assertRaises(ws.WebSocketTimeoutException):
|
||||||
|
data = sock.recv()
|
||||||
|
with self.assertRaises(ws.WebSocketTimeoutException):
|
||||||
|
data = sock.recv()
|
||||||
|
data = sock.recv()
|
||||||
|
self.assertEqual(data, "Hello, World!")
|
||||||
|
with self.assertRaises(ws.WebSocketConnectionClosedException):
|
||||||
|
data = sock.recv()
|
||||||
|
|
||||||
|
def testRecvWithSimpleFragmentation(self):
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
# OPCODE=TEXT, FIN=0, MSG="Brevity is "
|
||||||
|
s.add_packet(six.b("\x01\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C"))
|
||||||
|
# OPCODE=CONT, FIN=1, MSG="the soul of wit"
|
||||||
|
s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17"))
|
||||||
|
data = sock.recv()
|
||||||
|
self.assertEqual(data, "Brevity is the soul of wit")
|
||||||
|
with self.assertRaises(ws.WebSocketConnectionClosedException):
|
||||||
|
sock.recv()
|
||||||
|
|
||||||
|
def testRecvWithFireEventOfFragmentation(self):
|
||||||
|
sock = ws.WebSocket(fire_cont_frame=True)
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
# OPCODE=TEXT, FIN=0, MSG="Brevity is "
|
||||||
|
s.add_packet(six.b("\x01\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C"))
|
||||||
|
# OPCODE=CONT, FIN=0, MSG="Brevity is "
|
||||||
|
s.add_packet(six.b("\x00\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C"))
|
||||||
|
# OPCODE=CONT, FIN=1, MSG="the soul of wit"
|
||||||
|
s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17"))
|
||||||
|
|
||||||
|
_, data = sock.recv_data()
|
||||||
|
self.assertEqual(data, six.b("Brevity is "))
|
||||||
|
_, data = sock.recv_data()
|
||||||
|
self.assertEqual(data, six.b("Brevity is "))
|
||||||
|
_, data = sock.recv_data()
|
||||||
|
self.assertEqual(data, six.b("the soul of wit"))
|
||||||
|
|
||||||
|
# OPCODE=CONT, FIN=0, MSG="Brevity is "
|
||||||
|
s.add_packet(six.b("\x80\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C"))
|
||||||
|
|
||||||
|
with self.assertRaises(ws.WebSocketException):
|
||||||
|
sock.recv_data()
|
||||||
|
|
||||||
|
with self.assertRaises(ws.WebSocketConnectionClosedException):
|
||||||
|
sock.recv()
|
||||||
|
|
||||||
|
def testClose(self):
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
sock.sock = SockMock()
|
||||||
|
sock.connected = True
|
||||||
|
sock.close()
|
||||||
|
self.assertEqual(sock.connected, False)
|
||||||
|
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
sock.connected = True
|
||||||
|
s.add_packet(six.b('\x88\x80\x17\x98p\x84'))
|
||||||
|
sock.recv()
|
||||||
|
self.assertEqual(sock.connected, False)
|
||||||
|
|
||||||
|
def testRecvContFragmentation(self):
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
# OPCODE=CONT, FIN=1, MSG="the soul of wit"
|
||||||
|
s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17"))
|
||||||
|
self.assertRaises(ws.WebSocketException, sock.recv)
|
||||||
|
|
||||||
|
def testRecvWithProlongedFragmentation(self):
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
# OPCODE=TEXT, FIN=0, MSG="Once more unto the breach, "
|
||||||
|
s.add_packet(six.b("\x01\x9babcd.\x0c\x00\x01A\x0f\x0c\x16\x04B\x16\n\x15" \
|
||||||
|
"\rC\x10\t\x07C\x06\x13\x07\x02\x07\tNC"))
|
||||||
|
# OPCODE=CONT, FIN=0, MSG="dear friends, "
|
||||||
|
s.add_packet(six.b("\x00\x8eabcd\x05\x07\x02\x16A\x04\x11\r\x04\x0c\x07" \
|
||||||
|
"\x17MB"))
|
||||||
|
# OPCODE=CONT, FIN=1, MSG="once more"
|
||||||
|
s.add_packet(six.b("\x80\x89abcd\x0e\x0c\x00\x01A\x0f\x0c\x16\x04"))
|
||||||
|
data = sock.recv()
|
||||||
|
self.assertEqual(
|
||||||
|
data,
|
||||||
|
"Once more unto the breach, dear friends, once more")
|
||||||
|
with self.assertRaises(ws.WebSocketConnectionClosedException):
|
||||||
|
sock.recv()
|
||||||
|
|
||||||
|
def testRecvWithFragmentationAndControlFrame(self):
|
||||||
|
sock = ws.WebSocket()
|
||||||
|
sock.set_mask_key(create_mask_key)
|
||||||
|
s = sock.sock = SockMock()
|
||||||
|
# OPCODE=TEXT, FIN=0, MSG="Too much "
|
||||||
|
s.add_packet(six.b("\x01\x89abcd5\r\x0cD\x0c\x17\x00\x0cA"))
|
||||||
|
# OPCODE=PING, FIN=1, MSG="Please PONG this"
|
||||||
|
s.add_packet(six.b("\x89\x90abcd1\x0e\x06\x05\x12\x07C4.,$D\x15\n\n\x17"))
|
||||||
|
# OPCODE=CONT, FIN=1, MSG="of a good thing"
|
||||||
|
s.add_packet(six.b("\x80\x8fabcd\x0e\x04C\x05A\x05\x0c\x0b\x05B\x17\x0c" \
|
||||||
|
"\x08\x0c\x04"))
|
||||||
|
data = sock.recv()
|
||||||
|
self.assertEqual(data, "Too much of a good thing")
|
||||||
|
with self.assertRaises(ws.WebSocketConnectionClosedException):
|
||||||
|
sock.recv()
|
||||||
|
self.assertEqual(
|
||||||
|
s.sent[0],
|
||||||
|
six.b("\x8a\x90abcd1\x0e\x06\x05\x12\x07C4.,$D\x15\n\n\x17"))
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testWebSocket(self):
|
||||||
|
s = ws.create_connection("ws://echo.websocket.org/")
|
||||||
|
self.assertNotEqual(s, None)
|
||||||
|
s.send("Hello, World")
|
||||||
|
result = s.recv()
|
||||||
|
self.assertEqual(result, "Hello, World")
|
||||||
|
|
||||||
|
s.send(u"こにゃにゃちは、世界")
|
||||||
|
result = s.recv()
|
||||||
|
self.assertEqual(result, "こにゃにゃちは、世界")
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testPingPong(self):
|
||||||
|
s = ws.create_connection("ws://echo.websocket.org/")
|
||||||
|
self.assertNotEqual(s, None)
|
||||||
|
s.ping("Hello")
|
||||||
|
s.pong("Hi")
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
@unittest.skipUnless(TEST_SECURE_WS, "wss://echo.websocket.org doesn't work well.")
|
||||||
|
def testSecureWebSocket(self):
|
||||||
|
if 1:
|
||||||
|
import ssl
|
||||||
|
s = ws.create_connection("wss://echo.websocket.org/")
|
||||||
|
self.assertNotEqual(s, None)
|
||||||
|
self.assertTrue(isinstance(s.sock, ssl.SSLSocket))
|
||||||
|
s.send("Hello, World")
|
||||||
|
result = s.recv()
|
||||||
|
self.assertEqual(result, "Hello, World")
|
||||||
|
s.send(u"こにゃにゃちは、世界")
|
||||||
|
result = s.recv()
|
||||||
|
self.assertEqual(result, "こにゃにゃちは、世界")
|
||||||
|
s.close()
|
||||||
|
#except:
|
||||||
|
# pass
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testWebSocketWihtCustomHeader(self):
|
||||||
|
s = ws.create_connection("ws://echo.websocket.org/",
|
||||||
|
headers={"User-Agent": "PythonWebsocketClient"})
|
||||||
|
self.assertNotEqual(s, None)
|
||||||
|
s.send("Hello, World")
|
||||||
|
result = s.recv()
|
||||||
|
self.assertEqual(result, "Hello, World")
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testAfterClose(self):
|
||||||
|
s = ws.create_connection("ws://echo.websocket.org/")
|
||||||
|
self.assertNotEqual(s, None)
|
||||||
|
s.close()
|
||||||
|
self.assertRaises(ws.WebSocketConnectionClosedException, s.send, "Hello")
|
||||||
|
self.assertRaises(ws.WebSocketConnectionClosedException, s.recv)
|
||||||
|
|
||||||
|
def testUUID4(self):
|
||||||
|
""" WebSocket key should be a UUID4.
|
||||||
|
"""
|
||||||
|
key = _create_sec_websocket_key()
|
||||||
|
u = uuid.UUID(bytes=base64decode(key.encode("utf-8")))
|
||||||
|
self.assertEqual(4, u.version)
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketAppTest(unittest.TestCase):
|
||||||
|
|
||||||
|
class NotSetYet(object):
|
||||||
|
""" A marker class for signalling that a value hasn't been set yet.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
ws.enableTrace(TRACABLE)
|
||||||
|
|
||||||
|
WebSocketAppTest.keep_running_open = WebSocketAppTest.NotSetYet()
|
||||||
|
WebSocketAppTest.keep_running_close = WebSocketAppTest.NotSetYet()
|
||||||
|
WebSocketAppTest.get_mask_key_id = WebSocketAppTest.NotSetYet()
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
WebSocketAppTest.keep_running_open = WebSocketAppTest.NotSetYet()
|
||||||
|
WebSocketAppTest.keep_running_close = WebSocketAppTest.NotSetYet()
|
||||||
|
WebSocketAppTest.get_mask_key_id = WebSocketAppTest.NotSetYet()
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testKeepRunning(self):
|
||||||
|
""" A WebSocketApp should keep running as long as its self.keep_running
|
||||||
|
is not False (in the boolean context).
|
||||||
|
"""
|
||||||
|
|
||||||
|
def on_open(self, *args, **kwargs):
|
||||||
|
""" Set the keep_running flag for later inspection and immediately
|
||||||
|
close the connection.
|
||||||
|
"""
|
||||||
|
WebSocketAppTest.keep_running_open = self.keep_running
|
||||||
|
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
def on_close(self, *args, **kwargs):
|
||||||
|
""" Set the keep_running flag for the test to use.
|
||||||
|
"""
|
||||||
|
WebSocketAppTest.keep_running_close = self.keep_running
|
||||||
|
|
||||||
|
app = ws.WebSocketApp('ws://echo.websocket.org/', on_open=on_open, on_close=on_close)
|
||||||
|
app.run_forever()
|
||||||
|
|
||||||
|
self.assertFalse(isinstance(WebSocketAppTest.keep_running_open,
|
||||||
|
WebSocketAppTest.NotSetYet))
|
||||||
|
|
||||||
|
self.assertFalse(isinstance(WebSocketAppTest.keep_running_close,
|
||||||
|
WebSocketAppTest.NotSetYet))
|
||||||
|
|
||||||
|
self.assertEqual(True, WebSocketAppTest.keep_running_open)
|
||||||
|
self.assertEqual(False, WebSocketAppTest.keep_running_close)
|
||||||
|
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testSockMaskKey(self):
|
||||||
|
""" A WebSocketApp should forward the received mask_key function down
|
||||||
|
to the actual socket.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def my_mask_key_func():
|
||||||
|
pass
|
||||||
|
|
||||||
|
def on_open(self, *args, **kwargs):
|
||||||
|
""" Set the value so the test can use it later on and immediately
|
||||||
|
close the connection.
|
||||||
|
"""
|
||||||
|
WebSocketAppTest.get_mask_key_id = id(self.get_mask_key)
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
app = ws.WebSocketApp('ws://echo.websocket.org/', on_open=on_open, get_mask_key=my_mask_key_func)
|
||||||
|
app.run_forever()
|
||||||
|
|
||||||
|
# Note: We can't use 'is' for comparing the functions directly, need to use 'id'.
|
||||||
|
self.assertEqual(WebSocketAppTest.get_mask_key_id, id(my_mask_key_func))
|
||||||
|
|
||||||
|
|
||||||
|
class SockOptTest(unittest.TestCase):
|
||||||
|
@unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
|
||||||
|
def testSockOpt(self):
|
||||||
|
sockopt = ((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),)
|
||||||
|
s = ws.create_connection("ws://echo.websocket.org", sockopt=sockopt)
|
||||||
|
self.assertNotEqual(s.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY), 0)
|
||||||
|
s.close()
|
||||||
|
|
||||||
|
class UtilsTest(unittest.TestCase):
|
||||||
|
def testUtf8Validator(self):
|
||||||
|
state = validate_utf8(six.b('\xf0\x90\x80\x80'))
|
||||||
|
self.assertEqual(state, True)
|
||||||
|
state = validate_utf8(six.b('\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5\xed\xa0\x80edited'))
|
||||||
|
self.assertEqual(state, False)
|
||||||
|
state = validate_utf8(six.b(''))
|
||||||
|
self.assertEqual(state, True)
|
||||||
|
|
||||||
|
class ProxyInfoTest(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.http_proxy = os.environ.get("http_proxy", None)
|
||||||
|
self.https_proxy = os.environ.get("https_proxy", None)
|
||||||
|
if "http_proxy" in os.environ:
|
||||||
|
del os.environ["http_proxy"]
|
||||||
|
if "https_proxy" in os.environ:
|
||||||
|
del os.environ["https_proxy"]
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
if self.http_proxy:
|
||||||
|
os.environ["http_proxy"] = self.http_proxy
|
||||||
|
elif "http_proxy" in os.environ:
|
||||||
|
del os.environ["http_proxy"]
|
||||||
|
|
||||||
|
if self.https_proxy:
|
||||||
|
os.environ["https_proxy"] = self.https_proxy
|
||||||
|
elif "https_proxy" in os.environ:
|
||||||
|
del os.environ["https_proxy"]
|
||||||
|
|
||||||
|
|
||||||
|
def testProxyFromArgs(self):
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost"), ("localhost", 0, None))
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_port=3128), ("localhost", 3128, None))
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost"), ("localhost", 0, None))
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128), ("localhost", 3128, None))
|
||||||
|
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_auth=("a", "b")),
|
||||||
|
("localhost", 0, ("a", "b")))
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")),
|
||||||
|
("localhost", 3128, ("a", "b")))
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_auth=("a", "b")),
|
||||||
|
("localhost", 0, ("a", "b")))
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")),
|
||||||
|
("localhost", 3128, ("a", "b")))
|
||||||
|
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, no_proxy=["example.com"], proxy_auth=("a", "b")),
|
||||||
|
("localhost", 3128, ("a", "b")))
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, no_proxy=["echo.websocket.org"], proxy_auth=("a", "b")),
|
||||||
|
(None, 0, None))
|
||||||
|
|
||||||
|
|
||||||
|
def testProxyFromEnv(self):
|
||||||
|
os.environ["http_proxy"] = "http://localhost/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, None))
|
||||||
|
os.environ["http_proxy"] = "http://localhost:3128/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, None))
|
||||||
|
|
||||||
|
os.environ["http_proxy"] = "http://localhost/"
|
||||||
|
os.environ["https_proxy"] = "http://localhost2/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, None))
|
||||||
|
os.environ["http_proxy"] = "http://localhost:3128/"
|
||||||
|
os.environ["https_proxy"] = "http://localhost2:3128/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, None))
|
||||||
|
|
||||||
|
os.environ["http_proxy"] = "http://localhost/"
|
||||||
|
os.environ["https_proxy"] = "http://localhost2/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", None, None))
|
||||||
|
os.environ["http_proxy"] = "http://localhost:3128/"
|
||||||
|
os.environ["https_proxy"] = "http://localhost2:3128/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, None))
|
||||||
|
|
||||||
|
|
||||||
|
os.environ["http_proxy"] = "http://a:b@localhost/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, ("a", "b")))
|
||||||
|
os.environ["http_proxy"] = "http://a:b@localhost:3128/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, ("a", "b")))
|
||||||
|
|
||||||
|
os.environ["http_proxy"] = "http://a:b@localhost/"
|
||||||
|
os.environ["https_proxy"] = "http://a:b@localhost2/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, ("a", "b")))
|
||||||
|
os.environ["http_proxy"] = "http://a:b@localhost:3128/"
|
||||||
|
os.environ["https_proxy"] = "http://a:b@localhost2:3128/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, ("a", "b")))
|
||||||
|
|
||||||
|
os.environ["http_proxy"] = "http://a:b@localhost/"
|
||||||
|
os.environ["https_proxy"] = "http://a:b@localhost2/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", None, ("a", "b")))
|
||||||
|
os.environ["http_proxy"] = "http://a:b@localhost:3128/"
|
||||||
|
os.environ["https_proxy"] = "http://a:b@localhost2:3128/"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, ("a", "b")))
|
||||||
|
|
||||||
|
os.environ["http_proxy"] = "http://a:b@localhost/"
|
||||||
|
os.environ["https_proxy"] = "http://a:b@localhost2/"
|
||||||
|
os.environ["no_proxy"] = "example1.com,example2.com"
|
||||||
|
self.assertEqual(get_proxy_info("example.1.com", True), ("localhost2", None, ("a", "b")))
|
||||||
|
os.environ["http_proxy"] = "http://a:b@localhost:3128/"
|
||||||
|
os.environ["https_proxy"] = "http://a:b@localhost2:3128/"
|
||||||
|
os.environ["no_proxy"] = "example1.com,example2.com, echo.websocket.org"
|
||||||
|
self.assertEqual(get_proxy_info("echo.websocket.org", True), (None, 0, None))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
|
@ -31,7 +31,7 @@ except ImportError:
|
||||||
from apscheduler.schedulers.background import BackgroundScheduler
|
from apscheduler.schedulers.background import BackgroundScheduler
|
||||||
from apscheduler.triggers.interval import IntervalTrigger
|
from apscheduler.triggers.interval import IntervalTrigger
|
||||||
|
|
||||||
from plexpy import versioncheck, logger, monitor, plextv
|
from plexpy import versioncheck, logger, activity_pinger, plextv
|
||||||
import plexpy.config
|
import plexpy.config
|
||||||
|
|
||||||
PROG_DIR = None
|
PROG_DIR = None
|
||||||
|
@ -71,6 +71,7 @@ COMMITS_BEHIND = None
|
||||||
|
|
||||||
UMASK = None
|
UMASK = None
|
||||||
|
|
||||||
|
POLLING_FAILOVER = False
|
||||||
|
|
||||||
def initialize(config_file):
|
def initialize(config_file):
|
||||||
with INIT_LOCK:
|
with INIT_LOCK:
|
||||||
|
@ -80,6 +81,7 @@ def initialize(config_file):
|
||||||
global CURRENT_VERSION
|
global CURRENT_VERSION
|
||||||
global LATEST_VERSION
|
global LATEST_VERSION
|
||||||
global UMASK
|
global UMASK
|
||||||
|
global POLLING_FAILOVER
|
||||||
|
|
||||||
CONFIG = plexpy.config.Config(config_file)
|
CONFIG = plexpy.config.Config(config_file)
|
||||||
|
|
||||||
|
@ -279,7 +281,11 @@ def initialize_scheduler():
|
||||||
|
|
||||||
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
|
if CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
|
||||||
schedule_job(plextv.get_real_pms_url, 'Refresh Plex Server URLs', hours=12, minutes=0, seconds=0)
|
schedule_job(plextv.get_real_pms_url, 'Refresh Plex Server URLs', hours=12, minutes=0, seconds=0)
|
||||||
schedule_job(monitor.check_active_sessions, 'Check for active sessions', hours=0, minutes=0, seconds=seconds)
|
|
||||||
|
# If we're not using websockets then fall back to polling
|
||||||
|
if not CONFIG.MONITORING_USE_WEBSOCKET or POLLING_FAILOVER:
|
||||||
|
schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
|
||||||
|
hours=0, minutes=0, seconds=seconds)
|
||||||
|
|
||||||
# Refresh the users list
|
# Refresh the users list
|
||||||
if CONFIG.REFRESH_USERS_INTERVAL:
|
if CONFIG.REFRESH_USERS_INTERVAL:
|
||||||
|
@ -355,12 +361,12 @@ def dbcheck():
|
||||||
'audio_channels INTEGER, transcode_protocol TEXT, transcode_container TEXT, '
|
'audio_channels INTEGER, transcode_protocol TEXT, transcode_container TEXT, '
|
||||||
'transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER,'
|
'transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER,'
|
||||||
'transcode_width INTEGER, transcode_height INTEGER, buffer_count INTEGER DEFAULT 0, '
|
'transcode_width INTEGER, transcode_height INTEGER, buffer_count INTEGER DEFAULT 0, '
|
||||||
'buffer_last_triggered INTEGER)'
|
'buffer_last_triggered INTEGER, last_paused INTEGER)'
|
||||||
)
|
)
|
||||||
|
|
||||||
# session_history table :: This is a history table which logs essential stream details
|
# session_history table :: This is a history table which logs essential stream details
|
||||||
c_db.execute(
|
c_db.execute(
|
||||||
'CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
'CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, reference_id INTEGER, '
|
||||||
'started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
|
'started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
|
||||||
'ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, platform TEXT, machine_id TEXT, '
|
'ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, platform TEXT, machine_id TEXT, '
|
||||||
'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, view_offset INTEGER DEFAULT 0)'
|
'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, view_offset INTEGER DEFAULT 0)'
|
||||||
|
@ -603,12 +609,44 @@ def dbcheck():
|
||||||
'ALTER TABLE users ADD COLUMN custom_avatar_url TEXT'
|
'ALTER TABLE users ADD COLUMN custom_avatar_url TEXT'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Upgrade sessions table from earlier versions
|
||||||
|
try:
|
||||||
|
c_db.execute('SELECT last_paused from sessions')
|
||||||
|
except sqlite3.OperationalError:
|
||||||
|
logger.debug(u"Altering database. Updating database table sessions.")
|
||||||
|
c_db.execute(
|
||||||
|
'ALTER TABLE sessions ADD COLUMN last_paused INTEGER'
|
||||||
|
)
|
||||||
|
|
||||||
# Add "Local" user to database as default unauthenticated user.
|
# Add "Local" user to database as default unauthenticated user.
|
||||||
result = c_db.execute('SELECT id FROM users WHERE username = "Local"')
|
result = c_db.execute('SELECT id FROM users WHERE username = "Local"')
|
||||||
if not result.fetchone():
|
if not result.fetchone():
|
||||||
logger.debug(u'User "Local" does not exist. Adding user.')
|
logger.debug(u'User "Local" does not exist. Adding user.')
|
||||||
c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")')
|
c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")')
|
||||||
|
|
||||||
|
# Upgrade session_history table from earlier versions
|
||||||
|
try:
|
||||||
|
c_db.execute('SELECT reference_id from session_history')
|
||||||
|
except sqlite3.OperationalError:
|
||||||
|
logger.debug(u"Altering database. Updating database table session_history.")
|
||||||
|
c_db.execute(
|
||||||
|
'ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0'
|
||||||
|
)
|
||||||
|
# Set reference_id to the first row where (user_id = previous row, rating_key != previous row) and user_id = user_id
|
||||||
|
c_db.execute(
|
||||||
|
'UPDATE session_history ' \
|
||||||
|
'SET reference_id = (SELECT (CASE \
|
||||||
|
WHEN (SELECT MIN(id) FROM session_history WHERE id > ( \
|
||||||
|
SELECT MAX(id) FROM session_history \
|
||||||
|
WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) IS NULL \
|
||||||
|
THEN (SELECT MIN(id) FROM session_history WHERE (user_id = t1.user_id)) \
|
||||||
|
ELSE (SELECT MIN(id) FROM session_history WHERE id > ( \
|
||||||
|
SELECT MAX(id) FROM session_history \
|
||||||
|
WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) ' \
|
||||||
|
'FROM session_history AS t1 ' \
|
||||||
|
'WHERE t1.id = session_history.id) '
|
||||||
|
)
|
||||||
|
|
||||||
conn_db.commit()
|
conn_db.commit()
|
||||||
c_db.close()
|
c_db.close()
|
||||||
|
|
||||||
|
@ -616,10 +654,6 @@ def shutdown(restart=False, update=False):
|
||||||
cherrypy.engine.exit()
|
cherrypy.engine.exit()
|
||||||
SCHED.shutdown(wait=False)
|
SCHED.shutdown(wait=False)
|
||||||
|
|
||||||
# Clear any sessions in the db - Not sure yet if we should do this. More testing required
|
|
||||||
# logger.debug(u'Clearing Plex sessions.')
|
|
||||||
# monitor.drop_session_db()
|
|
||||||
|
|
||||||
CONFIG.write()
|
CONFIG.write()
|
||||||
|
|
||||||
if not restart and not update:
|
if not restart and not update:
|
||||||
|
|
215
plexpy/activity_handler.py
Normal file
215
plexpy/activity_handler.py
Normal file
|
@ -0,0 +1,215 @@
|
||||||
|
# This file is part of PlexPy.
|
||||||
|
#
|
||||||
|
# PlexPy is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# PlexPy is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import time
|
||||||
|
import plexpy
|
||||||
|
|
||||||
|
from plexpy import logger, pmsconnect, activity_processor, threading, notification_handler
|
||||||
|
|
||||||
|
|
||||||
|
class ActivityHandler(object):
|
||||||
|
|
||||||
|
def __init__(self, timeline):
|
||||||
|
self.timeline = timeline
|
||||||
|
# print timeline
|
||||||
|
|
||||||
|
def is_valid_session(self):
|
||||||
|
if 'sessionKey' in self.timeline:
|
||||||
|
if str(self.timeline['sessionKey']).isdigit():
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_session_key(self):
|
||||||
|
if self.is_valid_session():
|
||||||
|
return int(self.timeline['sessionKey'])
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_live_session(self):
|
||||||
|
pms_connect = pmsconnect.PmsConnect()
|
||||||
|
session_list = pms_connect.get_current_activity()
|
||||||
|
|
||||||
|
for session in session_list['sessions']:
|
||||||
|
if int(session['session_key']) == self.get_session_key():
|
||||||
|
return session
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def update_db_session(self):
|
||||||
|
# Update our session temp table values
|
||||||
|
monitor_proc = activity_processor.ActivityProcessor()
|
||||||
|
monitor_proc.write_session(session=self.get_live_session(), notify=False)
|
||||||
|
|
||||||
|
def on_start(self):
|
||||||
|
if self.is_valid_session():
|
||||||
|
logger.debug(u"PlexPy ActivityHandler :: Session %s has started." % str(self.get_session_key()))
|
||||||
|
|
||||||
|
# Fire off notifications
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=self.get_live_session(), notify_action='play')).start()
|
||||||
|
|
||||||
|
# Write the new session to our temp session table
|
||||||
|
self.update_db_session()
|
||||||
|
|
||||||
|
def on_stop(self, force_stop=False):
|
||||||
|
if self.is_valid_session():
|
||||||
|
logger.debug(u"PlexPy ActivityHandler :: Session %s has stopped." % str(self.get_session_key()))
|
||||||
|
|
||||||
|
# Set the session last_paused timestamp
|
||||||
|
ap = activity_processor.ActivityProcessor()
|
||||||
|
ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
|
||||||
|
|
||||||
|
# Update the session state and viewOffset
|
||||||
|
# Set force_stop to true to disable the state set
|
||||||
|
if not force_stop:
|
||||||
|
ap.set_session_state(session_key=self.get_session_key(),
|
||||||
|
state=self.timeline['state'],
|
||||||
|
view_offset=self.timeline['viewOffset'])
|
||||||
|
|
||||||
|
# Retrieve the session data from our temp table
|
||||||
|
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||||
|
|
||||||
|
# Fire off notifications
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=db_session, notify_action='stop')).start()
|
||||||
|
|
||||||
|
# Write it to the history table
|
||||||
|
monitor_proc = activity_processor.ActivityProcessor()
|
||||||
|
monitor_proc.write_session_history(session=db_session)
|
||||||
|
|
||||||
|
# Remove the session from our temp session table
|
||||||
|
ap.delete_session(session_key=self.get_session_key())
|
||||||
|
|
||||||
|
def on_pause(self):
|
||||||
|
if self.is_valid_session():
|
||||||
|
logger.debug(u"PlexPy ActivityHandler :: Session %s has been paused." % str(self.get_session_key()))
|
||||||
|
|
||||||
|
# Set the session last_paused timestamp
|
||||||
|
ap = activity_processor.ActivityProcessor()
|
||||||
|
ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=int(time.time()))
|
||||||
|
|
||||||
|
# Update the session state and viewOffset
|
||||||
|
ap.set_session_state(session_key=self.get_session_key(),
|
||||||
|
state=self.timeline['state'],
|
||||||
|
view_offset=self.timeline['viewOffset'])
|
||||||
|
|
||||||
|
# Retrieve the session data from our temp table
|
||||||
|
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||||
|
|
||||||
|
# Fire off notifications
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=db_session, notify_action='pause')).start()
|
||||||
|
|
||||||
|
def on_resume(self):
|
||||||
|
if self.is_valid_session():
|
||||||
|
logger.debug(u"PlexPy ActivityHandler :: Session %s has been resumed." % str(self.get_session_key()))
|
||||||
|
|
||||||
|
# Set the session last_paused timestamp
|
||||||
|
ap = activity_processor.ActivityProcessor()
|
||||||
|
ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)
|
||||||
|
|
||||||
|
# Update the session state and viewOffset
|
||||||
|
ap.set_session_state(session_key=self.get_session_key(),
|
||||||
|
state=self.timeline['state'],
|
||||||
|
view_offset=self.timeline['viewOffset'])
|
||||||
|
|
||||||
|
# Retrieve the session data from our temp table
|
||||||
|
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||||
|
|
||||||
|
# Fire off notifications
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=db_session, notify_action='resume')).start()
|
||||||
|
|
||||||
|
def on_buffer(self):
|
||||||
|
if self.is_valid_session():
|
||||||
|
logger.debug(u"PlexPy ActivityHandler :: Session %s is buffering." % self.get_session_key())
|
||||||
|
ap = activity_processor.ActivityProcessor()
|
||||||
|
db_stream = ap.get_session_by_key(session_key=self.get_session_key())
|
||||||
|
|
||||||
|
# Increment our buffer count
|
||||||
|
ap.increment_session_buffer_count(session_key=self.get_session_key())
|
||||||
|
|
||||||
|
# Get our current buffer count
|
||||||
|
current_buffer_count = ap.get_session_buffer_count(self.get_session_key())
|
||||||
|
logger.debug(u"PlexPy ActivityHandler :: Session %s buffer count is %s." %
|
||||||
|
(self.get_session_key(), current_buffer_count))
|
||||||
|
|
||||||
|
# Get our last triggered time
|
||||||
|
buffer_last_triggered = ap.get_session_buffer_trigger_time(self.get_session_key())
|
||||||
|
|
||||||
|
time_since_last_trigger = 0
|
||||||
|
if buffer_last_triggered:
|
||||||
|
logger.debug(u"PlexPy ActivityHandler :: Session %s buffer last triggered at %s." %
|
||||||
|
(self.get_session_key(), buffer_last_triggered))
|
||||||
|
time_since_last_trigger = int(time.time()) - int(buffer_last_triggered)
|
||||||
|
|
||||||
|
if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
|
||||||
|
time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
|
||||||
|
ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=db_stream, notify_action='buffer')).start()
|
||||||
|
|
||||||
|
# This function receives events from our websocket connection
|
||||||
|
def process(self):
|
||||||
|
if self.is_valid_session():
|
||||||
|
from plexpy import helpers
|
||||||
|
|
||||||
|
ap = activity_processor.ActivityProcessor()
|
||||||
|
db_session = ap.get_session_by_key(session_key=self.get_session_key())
|
||||||
|
|
||||||
|
this_state = self.timeline['state']
|
||||||
|
this_key = str(self.timeline['ratingKey'])
|
||||||
|
|
||||||
|
# If we already have this session in the temp table, check for state changes
|
||||||
|
if db_session:
|
||||||
|
last_state = db_session['state']
|
||||||
|
last_key = str(db_session['rating_key'])
|
||||||
|
|
||||||
|
# Make sure the same item is being played
|
||||||
|
if this_key == last_key:
|
||||||
|
# Update the session state and viewOffset
|
||||||
|
if this_state == 'playing':
|
||||||
|
ap.set_session_state(session_key=self.get_session_key(),
|
||||||
|
state=this_state,
|
||||||
|
view_offset=self.timeline['viewOffset'])
|
||||||
|
# Start our state checks
|
||||||
|
if this_state != last_state:
|
||||||
|
if this_state == 'paused':
|
||||||
|
self.on_pause()
|
||||||
|
elif last_state == 'paused' and this_state == 'playing':
|
||||||
|
self.on_resume()
|
||||||
|
elif this_state == 'stopped':
|
||||||
|
self.on_stop()
|
||||||
|
elif this_state == 'buffering':
|
||||||
|
self.on_buffer()
|
||||||
|
# If a client doesn't register stop events (I'm looking at you PHT!) check if the ratingKey has changed
|
||||||
|
else:
|
||||||
|
# Manually stop and start
|
||||||
|
# Set force_stop so that we don't overwrite our last viewOffset
|
||||||
|
self.on_stop(force_stop=True)
|
||||||
|
self.on_start()
|
||||||
|
|
||||||
|
# Monitor if the stream has reached the watch percentage for notifications
|
||||||
|
# The only purpose of this is for notifications
|
||||||
|
progress_percent = helpers.get_percent(self.timeline['viewOffset'], db_session['duration'])
|
||||||
|
if progress_percent >= plexpy.CONFIG.NOTIFY_WATCHED_PERCENT and this_state != 'buffering':
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=db_session, notify_action='watched')).start()
|
||||||
|
|
||||||
|
else:
|
||||||
|
# We don't have this session in our table yet, start a new one.
|
||||||
|
if this_state != 'buffering':
|
||||||
|
self.on_start()
|
164
plexpy/activity_pinger.py
Normal file
164
plexpy/activity_pinger.py
Normal file
|
@ -0,0 +1,164 @@
|
||||||
|
# This file is part of PlexPy.
|
||||||
|
#
|
||||||
|
# PlexPy is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# PlexPy is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from plexpy import logger, pmsconnect, notification_handler, database, helpers, activity_processor
|
||||||
|
|
||||||
|
import threading
|
||||||
|
import plexpy
|
||||||
|
import time
|
||||||
|
|
||||||
|
monitor_lock = threading.Lock()
|
||||||
|
|
||||||
|
|
||||||
|
def check_active_sessions(ws_request=False):
|
||||||
|
|
||||||
|
with monitor_lock:
|
||||||
|
pms_connect = pmsconnect.PmsConnect()
|
||||||
|
session_list = pms_connect.get_current_activity()
|
||||||
|
monitor_db = database.MonitorDatabase()
|
||||||
|
monitor_process = activity_processor.ActivityProcessor()
|
||||||
|
# logger.debug(u"PlexPy Monitor :: Checking for active streams.")
|
||||||
|
|
||||||
|
if session_list:
|
||||||
|
media_container = session_list['sessions']
|
||||||
|
|
||||||
|
# Check our temp table for what we must do with the new streams
|
||||||
|
db_streams = monitor_db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, '
|
||||||
|
'grandparent_title, user_id, user, friendly_name, ip_address, player, '
|
||||||
|
'platform, machine_id, parent_rating_key, grandparent_rating_key, state, '
|
||||||
|
'view_offset, duration, video_decision, audio_decision, width, height, '
|
||||||
|
'container, video_codec, audio_codec, bitrate, video_resolution, '
|
||||||
|
'video_framerate, aspect_ratio, audio_channels, transcode_protocol, '
|
||||||
|
'transcode_container, transcode_video_codec, transcode_audio_codec, '
|
||||||
|
'transcode_audio_channels, transcode_width, transcode_height, '
|
||||||
|
'paused_counter, last_paused '
|
||||||
|
'FROM sessions')
|
||||||
|
for stream in db_streams:
|
||||||
|
if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
|
||||||
|
for d in media_container):
|
||||||
|
# The user's session is still active
|
||||||
|
for session in media_container:
|
||||||
|
if session['session_key'] == str(stream['session_key']) and \
|
||||||
|
session['rating_key'] == str(stream['rating_key']):
|
||||||
|
# The user is still playing the same media item
|
||||||
|
# Here we can check the play states
|
||||||
|
if session['state'] != stream['state']:
|
||||||
|
if session['state'] == 'paused':
|
||||||
|
# Push any notifications -
|
||||||
|
# Push it on it's own thread so we don't hold up our db actions
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=stream, notify_action='pause')).start()
|
||||||
|
|
||||||
|
if session['state'] == 'playing' and stream['state'] == 'paused':
|
||||||
|
# Push any notifications -
|
||||||
|
# Push it on it's own thread so we don't hold up our db actions
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=stream, notify_action='resume')).start()
|
||||||
|
|
||||||
|
if stream['state'] == 'paused' and not ws_request:
|
||||||
|
# The stream is still paused so we need to increment the paused_counter
|
||||||
|
# Using the set config parameter as the interval, probably not the most accurate but
|
||||||
|
# it will have to do for now. If it's a websocket request don't use this method.
|
||||||
|
paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL
|
||||||
|
monitor_db.action('UPDATE sessions SET paused_counter = ? '
|
||||||
|
'WHERE session_key = ? AND rating_key = ?',
|
||||||
|
[paused_counter, stream['session_key'], stream['rating_key']])
|
||||||
|
|
||||||
|
if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
|
||||||
|
# The stream is buffering so we need to increment the buffer_count
|
||||||
|
# We're going just increment on every monitor ping,
|
||||||
|
# would be difficult to keep track otherwise
|
||||||
|
monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
|
||||||
|
'WHERE session_key = ? AND rating_key = ?',
|
||||||
|
[stream['session_key'], stream['rating_key']])
|
||||||
|
|
||||||
|
# Check the current buffer count and last buffer to determine if we should notify
|
||||||
|
buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered '
|
||||||
|
'FROM sessions '
|
||||||
|
'WHERE session_key = ? AND rating_key = ?',
|
||||||
|
[stream['session_key'], stream['rating_key']])
|
||||||
|
|
||||||
|
if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
|
||||||
|
# Push any notifications -
|
||||||
|
# Push it on it's own thread so we don't hold up our db actions
|
||||||
|
# Our first buffer notification
|
||||||
|
if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
|
||||||
|
logger.info(u"PlexPy Monitor :: User '%s' has triggered a buffer warning."
|
||||||
|
% stream['user'])
|
||||||
|
# Set the buffer trigger time
|
||||||
|
monitor_db.action('UPDATE sessions '
|
||||||
|
'SET buffer_last_triggered = strftime("%s","now") '
|
||||||
|
'WHERE session_key = ? AND rating_key = ?',
|
||||||
|
[stream['session_key'], stream['rating_key']])
|
||||||
|
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=stream, notify_action='buffer')).start()
|
||||||
|
else:
|
||||||
|
# Subsequent buffer notifications after wait time
|
||||||
|
if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
|
||||||
|
plexpy.CONFIG.BUFFER_WAIT:
|
||||||
|
logger.info(u"PlexPy Monitor :: User '%s' has triggered multiple buffer warnings."
|
||||||
|
% stream['user'])
|
||||||
|
# Set the buffer trigger time
|
||||||
|
monitor_db.action('UPDATE sessions '
|
||||||
|
'SET buffer_last_triggered = strftime("%s","now") '
|
||||||
|
'WHERE session_key = ? AND rating_key = ?',
|
||||||
|
[stream['session_key'], stream['rating_key']])
|
||||||
|
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=stream, notify_action='buffer')).start()
|
||||||
|
|
||||||
|
logger.debug(u"PlexPy Monitor :: Stream buffering. Count is now %s. Last triggered %s."
|
||||||
|
% (buffer_values[0][0], buffer_values[0][1]))
|
||||||
|
|
||||||
|
# Check if the user has reached the offset in the media we defined as the "watched" percent
|
||||||
|
# Don't trigger if state is buffer as some clients push the progress to the end when
|
||||||
|
# buffering on start.
|
||||||
|
if session['view_offset'] and session['duration'] and session['state'] != 'buffering':
|
||||||
|
if helpers.get_percent(session['view_offset'],
|
||||||
|
session['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
|
||||||
|
# Push any notifications -
|
||||||
|
# Push it on it's own thread so we don't hold up our db actions
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=stream, notify_action='watched')).start()
|
||||||
|
|
||||||
|
else:
|
||||||
|
# The user has stopped playing a stream
|
||||||
|
logger.debug(u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue"
|
||||||
|
% (stream['session_key'], stream['rating_key']))
|
||||||
|
monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
|
||||||
|
[stream['session_key'], stream['rating_key']])
|
||||||
|
|
||||||
|
# Check if the user has reached the offset in the media we defined as the "watched" percent
|
||||||
|
if stream['view_offset'] and stream['duration']:
|
||||||
|
if helpers.get_percent(stream['view_offset'],
|
||||||
|
stream['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
|
||||||
|
# Push any notifications -
|
||||||
|
# Push it on it's own thread so we don't hold up our db actions
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=stream, notify_action='watched')).start()
|
||||||
|
|
||||||
|
# Push any notifications - Push it on it's own thread so we don't hold up our db actions
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=stream, notify_action='stop')).start()
|
||||||
|
|
||||||
|
# Write the item history on playback stop
|
||||||
|
monitor_process.write_session_history(session=stream)
|
||||||
|
|
||||||
|
# Process the newly received session data
|
||||||
|
for session in media_container:
|
||||||
|
monitor_process.write_session(session)
|
||||||
|
else:
|
||||||
|
logger.debug(u"PlexPy Monitor :: Unable to read session list.")
|
400
plexpy/activity_processor.py
Normal file
400
plexpy/activity_processor.py
Normal file
|
@ -0,0 +1,400 @@
|
||||||
|
# This file is part of PlexPy.
|
||||||
|
#
|
||||||
|
# PlexPy is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# PlexPy is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from plexpy import logger, pmsconnect, notification_handler, log_reader, database
|
||||||
|
|
||||||
|
import threading
|
||||||
|
import plexpy
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
|
class ActivityProcessor(object):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.db = database.MonitorDatabase()
|
||||||
|
|
||||||
|
def write_session(self, session=None, notify=True):
|
||||||
|
if session:
|
||||||
|
values = {'session_key': session['session_key'],
|
||||||
|
'rating_key': session['rating_key'],
|
||||||
|
'media_type': session['media_type'],
|
||||||
|
'state': session['state'],
|
||||||
|
'user_id': session['user_id'],
|
||||||
|
'user': session['user'],
|
||||||
|
'machine_id': session['machine_id'],
|
||||||
|
'title': session['title'],
|
||||||
|
'parent_title': session['parent_title'],
|
||||||
|
'grandparent_title': session['grandparent_title'],
|
||||||
|
'friendly_name': session['friendly_name'],
|
||||||
|
'player': session['player'],
|
||||||
|
'platform': session['platform'],
|
||||||
|
'parent_rating_key': session['parent_rating_key'],
|
||||||
|
'grandparent_rating_key': session['grandparent_rating_key'],
|
||||||
|
'view_offset': session['view_offset'],
|
||||||
|
'duration': session['duration'],
|
||||||
|
'video_decision': session['video_decision'],
|
||||||
|
'audio_decision': session['audio_decision'],
|
||||||
|
'width': session['width'],
|
||||||
|
'height': session['height'],
|
||||||
|
'container': session['container'],
|
||||||
|
'video_codec': session['video_codec'],
|
||||||
|
'audio_codec': session['audio_codec'],
|
||||||
|
'bitrate': session['bitrate'],
|
||||||
|
'video_resolution': session['video_resolution'],
|
||||||
|
'video_framerate': session['video_framerate'],
|
||||||
|
'aspect_ratio': session['aspect_ratio'],
|
||||||
|
'audio_channels': session['audio_channels'],
|
||||||
|
'transcode_protocol': session['transcode_protocol'],
|
||||||
|
'transcode_container': session['transcode_container'],
|
||||||
|
'transcode_video_codec': session['transcode_video_codec'],
|
||||||
|
'transcode_audio_codec': session['transcode_audio_codec'],
|
||||||
|
'transcode_audio_channels': session['transcode_audio_channels'],
|
||||||
|
'transcode_width': session['transcode_width'],
|
||||||
|
'transcode_height': session['transcode_height']
|
||||||
|
}
|
||||||
|
|
||||||
|
keys = {'session_key': session['session_key'],
|
||||||
|
'rating_key': session['rating_key']}
|
||||||
|
|
||||||
|
result = self.db.upsert('sessions', values, keys)
|
||||||
|
|
||||||
|
if result == 'insert':
|
||||||
|
# Push any notifications - Push it on it's own thread so we don't hold up our db actions
|
||||||
|
if notify:
|
||||||
|
threading.Thread(target=notification_handler.notify,
|
||||||
|
kwargs=dict(stream_data=values, notify_action='play')).start()
|
||||||
|
|
||||||
|
started = int(time.time())
|
||||||
|
|
||||||
|
# Try and grab IP address from logs
|
||||||
|
if plexpy.CONFIG.IP_LOGGING_ENABLE and plexpy.CONFIG.PMS_LOGS_FOLDER:
|
||||||
|
ip_address = self.find_session_ip(rating_key=session['rating_key'],
|
||||||
|
machine_id=session['machine_id'])
|
||||||
|
else:
|
||||||
|
ip_address = None
|
||||||
|
|
||||||
|
timestamp = {'started': started,
|
||||||
|
'ip_address': ip_address}
|
||||||
|
|
||||||
|
# If it's our first write then time stamp it.
|
||||||
|
self.db.upsert('sessions', timestamp, keys)
|
||||||
|
|
||||||
|
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
|
||||||
|
from plexpy import users
|
||||||
|
|
||||||
|
user_data = users.Users()
|
||||||
|
user_details = user_data.get_user_friendly_name(user=session['user'])
|
||||||
|
|
||||||
|
if session:
|
||||||
|
logging_enabled = False
|
||||||
|
|
||||||
|
if is_import:
|
||||||
|
if str(session['stopped']).isdigit():
|
||||||
|
stopped = int(session['stopped'])
|
||||||
|
else:
|
||||||
|
stopped = int(time.time())
|
||||||
|
else:
|
||||||
|
stopped = int(time.time())
|
||||||
|
|
||||||
|
if plexpy.CONFIG.VIDEO_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
|
||||||
|
(session['media_type'] == 'movie' or session['media_type'] == 'episode'):
|
||||||
|
logging_enabled = True
|
||||||
|
elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
|
||||||
|
session['media_type'] == 'track':
|
||||||
|
logging_enabled = True
|
||||||
|
else:
|
||||||
|
logger.debug(u"PlexPy ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
|
||||||
|
u"Media type is '%s'" % (session['rating_key'], session['media_type']))
|
||||||
|
|
||||||
|
if str(session['paused_counter']).isdigit():
|
||||||
|
real_play_time = stopped - session['started'] - int(session['paused_counter'])
|
||||||
|
else:
|
||||||
|
real_play_time = stopped - session['started']
|
||||||
|
|
||||||
|
if plexpy.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import:
|
||||||
|
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
|
||||||
|
(real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
|
||||||
|
logging_enabled = False
|
||||||
|
logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
|
||||||
|
u"seconds, so we're not logging it." %
|
||||||
|
(session['rating_key'], str(real_play_time), plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
|
||||||
|
if session['media_type'] == 'track' and not is_import:
|
||||||
|
if real_play_time < 15 and session['duration'] >= 30:
|
||||||
|
logging_enabled = False
|
||||||
|
logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs, "
|
||||||
|
u"looks like it was skipped so we're not logging it" %
|
||||||
|
                                 (session['rating_key'], str(real_play_time)))
            elif is_import and import_ignore_interval:
                if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                        (real_play_time < int(import_ignore_interval)):
                    logging_enabled = False
                    logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                                 u"seconds, so we're not logging it." %
                                 (session['rating_key'], str(real_play_time),
                                  import_ignore_interval))

            if not user_details['keep_history'] and not is_import:
                logging_enabled = False
                logger.debug(u"PlexPy ActivityProcessor :: History logging for user '%s' is disabled." % session['user'])

            if logging_enabled:
                # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history table...")
                query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                        'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                        'platform, machine_id, view_offset) VALUES ' \
                        '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['started'], stopped, session['rating_key'], session['parent_rating_key'],
                        session['grandparent_rating_key'], session['media_type'], session['user_id'], session['user'],
                        session['ip_address'], session['paused_counter'], session['player'], session['platform'],
                        session['machine_id'], session['view_offset']]

                # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history transaction...")
                self.db.action(query=query, args=args)

                # Check if we should group the session, select the last two rows from the user
                query = 'SELECT id, rating_key, user_id, reference_id FROM session_history \
                         WHERE user_id = ? ORDER BY id DESC LIMIT 2 '

                args = [session['user_id']]

                result = self.db.select(query=query, args=args)

                new_session = {'id': result[0][0],
                               'rating_key': result[0][1],
                               'user_id': result[0][2],
                               'reference_id': result[0][3]}

                if len(result) == 1:
                    prev_session = None
                else:
                    prev_session = {'id': result[1][0],
                                    'rating_key': result[1][1],
                                    'user_id': result[1][2],
                                    'reference_id': result[1][3]}

                query = 'UPDATE session_history SET reference_id = ? WHERE id = ? '
                # If rating_key is the same in the previous session, then set the reference_id to the previous row, else set the reference_id to the new id
                if (prev_session is not None) and (prev_session['rating_key'] == new_session['rating_key']):
                    args = [prev_session['reference_id'], new_session['id']]
                else:
                    args = [new_session['id'], new_session['id']]

                self.db.action(query=query, args=args)

                # logger.debug(u"PlexPy ActivityProcessor :: Successfully written history item, last id for session_history is %s"
                # % last_id)

                # Write the session_history_media_info table
                # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_media_info table...")
                query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                        'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                        'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                        'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                        'transcode_height) VALUES ' \
                        '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['rating_key'], session['video_decision'], session['audio_decision'],
                        session['duration'], session['width'], session['height'], session['container'],
                        session['video_codec'], session['audio_codec'], session['bitrate'],
                        session['video_resolution'], session['video_framerate'], session['aspect_ratio'],
                        session['audio_channels'], session['transcode_protocol'], session['transcode_container'],
                        session['transcode_video_codec'], session['transcode_audio_codec'],
                        session['transcode_audio_channels'], session['transcode_width'], session['transcode_height']]

                # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_media_info transaction...")
                self.db.action(query=query, args=args)

                if not is_import:
                    logger.debug(u"PlexPy ActivityProcessor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
                    pms_connect = pmsconnect.PmsConnect()
                    result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
                    metadata = result['metadata']
                else:
                    metadata = import_metadata

                # Write the session_history_metadata table
                directors = ";".join(metadata['directors'])
                writers = ";".join(metadata['writers'])
                actors = ";".join(metadata['actors'])
                genres = ";".join(metadata['genres'])

                # Build media item title
                if session['media_type'] == 'episode' or session['media_type'] == 'track':
                    full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
                elif session['media_type'] == 'movie':
                    full_title = metadata['title']
                else:
                    full_title = metadata['title']

                # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_metadata table...")
                query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                        'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                        'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \
                        'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \
                        'tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \
                        '(last_insert_rowid(), ' \
                        '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'],
                        session['title'], session['parent_title'], session['grandparent_title'], full_title,
                        metadata['index'], metadata['parent_index'], metadata['thumb'], metadata['parent_thumb'],
                        metadata['grandparent_thumb'], metadata['art'], session['media_type'], metadata['year'],
                        metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'],
                        metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['tagline'],
                        metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, metadata['studio']]

                # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_metadata transaction...")
                self.db.action(query=query, args=args)

    def find_session_ip(self, rating_key=None, machine_id=None):

        logger.debug(u"PlexPy ActivityProcessor :: Requesting log lines...")
        log_lines = log_reader.get_log_tail(window=5000, parsed=False)

        rating_key_line = 'ratingKey=' + rating_key
        rating_key_line_2 = 'metadata%2F' + rating_key
        machine_id_line = 'session=' + machine_id

        for line in reversed(log_lines):
            # We're good if we find a line with both machine id and rating key
            # This is usually when there is a transcode session
            if machine_id_line in line and (rating_key_line in line or rating_key_line_2 in line):
                # Currently only checking for ipv4 addresses
                ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', line)
                if ipv4:
                    # The logged IP will always be the first match and we don't want localhost entries
                    if ipv4[0] != '127.0.0.1':
                        logger.debug(u"PlexPy ActivityProcessor :: Matched IP address (%s) for stream ratingKey %s "
                                     u"and machineIdentifier %s."
                                     % (ipv4[0], rating_key, machine_id))
                        return ipv4[0]

        logger.debug(u"PlexPy ActivityProcessor :: Unable to find IP address on first pass. "
                     u"Attempting fallback check in 5 seconds...")

        # Wait for the log to catch up and read in new lines
        time.sleep(5)

        logger.debug(u"PlexPy ActivityProcessor :: Requesting log lines...")
        log_lines = log_reader.get_log_tail(window=5000, parsed=False)

        for line in reversed(log_lines):
            if 'GET /:/timeline' in line and (rating_key_line in line or rating_key_line_2 in line):
                # Currently only checking for ipv4 addresses
                # This method can return the wrong IP address if more than one user
                # starts watching the same media item around the same time.
                ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', line)
                if ipv4:
                    # The logged IP will always be the first match and we don't want localhost entries
                    if ipv4[0] != '127.0.0.1':
                        logger.debug(u"PlexPy ActivityProcessor :: Matched IP address (%s) for stream ratingKey %s." %
                                     (ipv4[0], rating_key))
                        return ipv4[0]

        logger.debug(u"PlexPy ActivityProcessor :: Unable to find IP address on fallback search. Not logging IP address.")

        return None

    def get_session_by_key(self, session_key=None):
        if str(session_key).isdigit():
            result = self.db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, '
                                    'grandparent_title, user_id, user, friendly_name, ip_address, player, '
                                    'platform, machine_id, parent_rating_key, grandparent_rating_key, state, '
                                    'view_offset, duration, video_decision, audio_decision, width, height, '
                                    'container, video_codec, audio_codec, bitrate, video_resolution, '
                                    'video_framerate, aspect_ratio, audio_channels, transcode_protocol, '
                                    'transcode_container, transcode_video_codec, transcode_audio_codec, '
                                    'transcode_audio_channels, transcode_width, transcode_height, '
                                    'paused_counter, last_paused '
                                    'FROM sessions WHERE session_key = ? LIMIT 1', args=[session_key])
            for session in result:
                if session:
                    return session

        return None

    def set_session_state(self, session_key=None, state=None, view_offset=0):
        if str(session_key).isdigit() and str(view_offset).isdigit():
            values = {'view_offset': int(view_offset)}
            if state:
                values['state'] = state

            keys = {'session_key': session_key}
            result = self.db.upsert('sessions', values, keys)

            return result

        return None

    def delete_session(self, session_key=None):
        if str(session_key).isdigit():
            self.db.action('DELETE FROM sessions WHERE session_key = ?', [session_key])

    def set_session_last_paused(self, session_key=None, timestamp=None):
        if str(session_key).isdigit():
            result = self.db.select('SELECT last_paused, paused_counter '
                                    'FROM sessions '
                                    'WHERE session_key = ?', args=[session_key])

            paused_counter = None
            for session in result:
                if session['last_paused']:
                    paused_offset = int(time.time()) - int(session['last_paused'])
                    paused_counter = int(session['paused_counter']) + int(paused_offset)

            values = {'state': 'playing',
                      'last_paused': timestamp
                      }
            if paused_counter:
                values['paused_counter'] = paused_counter

            keys = {'session_key': session_key}
            self.db.upsert('sessions', values, keys)

    def increment_session_buffer_count(self, session_key=None):
        if str(session_key).isdigit():
            self.db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
                           'WHERE session_key = ?',
                           [session_key])

    def get_session_buffer_count(self, session_key=None):
        if str(session_key).isdigit():
            buffer_count = self.db.select_single('SELECT buffer_count '
                                                 'FROM sessions '
                                                 'WHERE session_key = ?',
                                                 [session_key])
            if buffer_count:
                return buffer_count

        return 0

    def set_session_buffer_trigger_time(self, session_key=None):
        if str(session_key).isdigit():
            self.db.action('UPDATE sessions SET buffer_last_triggered = strftime("%s","now") '
                           'WHERE session_key = ?',
                           [session_key])

    def get_session_buffer_trigger_time(self, session_key=None):
        if str(session_key).isdigit():
            last_time = self.db.select_single('SELECT buffer_last_triggered '
                                              'FROM sessions '
                                              'WHERE session_key = ?',
                                              [session_key])
            if last_time:
                return last_time

        return None

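The reference_id bookkeeping above is what drives the new "group consecutive plays" option from the changelog: the first row of a run of plays points at itself, and every later row for the same rating_key points back at that first row. A minimal sketch of the same idea, over hypothetical in-memory rows rather than the session_history table:

# Illustrative only -- not part of the commit; mirrors how reference_id is assigned.
def assign_reference_ids(rows):
    """rows: list of dicts with 'id' and 'rating_key', ordered oldest to newest."""
    prev = None
    for row in rows:
        if prev is not None and prev['rating_key'] == row['rating_key']:
            row['reference_id'] = prev['reference_id']   # continue the existing group
        else:
            row['reference_id'] = row['id']              # start a new group
        prev = row
    return rows
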
@ -73,6 +73,7 @@ _CONFIG_DEFINITIONS = {
    'GIT_BRANCH': (str, 'General', 'master'),
    'GIT_PATH': (str, 'General', ''),
    'GIT_USER': (str, 'General', 'drzoidberg33'),
    'GROUP_HISTORY_TABLES': (int, 'General', 0),
    'GROWL_ENABLED': (int, 'Growl', 0),
    'GROWL_HOST': (str, 'Growl', ''),
    'GROWL_PASSWORD': (str, 'Growl', ''),

@ -111,6 +112,7 @@ _CONFIG_DEFINITIONS = {
    'MUSIC_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
    'MUSIC_LOGGING_ENABLE': (int, 'Monitoring', 0),
    'MONITORING_INTERVAL': (int, 'Monitoring', 60),
    'MONITORING_USE_WEBSOCKET': (int, 'Monitoring', 0),
    'NMA_APIKEY': (str, 'NMA', ''),
    'NMA_ENABLED': (int, 'NMA', 0),
    'NMA_PRIORITY': (int, 'NMA', 0),

@ -120,6 +122,7 @@ _CONFIG_DEFINITIONS = {
    'NMA_ON_RESUME': (int, 'NMA', 0),
    'NMA_ON_BUFFER': (int, 'NMA', 0),
    'NMA_ON_WATCHED': (int, 'NMA', 0),
    'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
    'NOTIFY_WATCHED_PERCENT': (int, 'Monitoring', 85),
    'NOTIFY_ON_START_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'),
    'NOTIFY_ON_START_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) started playing {title}.'),

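Each _CONFIG_DEFINITIONS entry is a (type, section, default) tuple keyed by the setting name, so the three new keys above read as integer settings in the 'General' and 'Monitoring' sections. A rough sketch of how such a table can be turned into typed lookups; the reader function and parser interface here are illustrative, not PlexPy's actual config loader:

# Illustrative only -- a simplified reader for (type, section, default) definitions.
_DEFS = {
    'GROUP_HISTORY_TABLES': (int, 'General', 0),
    'MONITORING_USE_WEBSOCKET': (int, 'Monitoring', 0),
    'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
}

def read_setting(parser, key):
    """parser: any object exposing ConfigParser-style get(section, option)."""
    cast, section, default = _DEFS[key]
    try:
        return cast(parser.get(section, key.lower()))
    except Exception:
        return default
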
@ -18,7 +18,10 @@ from plexpy import logger
import sqlite3
import os
import plexpy
import time
import threading

db_lock = threading.Lock()

def drop_session_db():
    monitor_db = MonitorDatabase()

@ -58,31 +61,37 @@ class MonitorDatabase(object):
        self.connection.row_factory = sqlite3.Row

    def action(self, query, args=None, return_last_id=False):

        if query is None:
            return

        with db_lock:
            sql_result = None
            attempts = 0

            while attempts < 5:
                try:
                    with self.connection as c:
                        if args is None:
                            sql_result = c.execute(query)
                        else:
                            sql_result = c.execute(query, args)

                    # Our transaction was successful, leave the loop
                    break

                except sqlite3.OperationalError, e:
                    if "unable to open database file" in e.message or "database is locked" in e.message:
                        logger.warn('Database Error: %s', e)
                        attempts += 1
                        time.sleep(1)
                    else:
                        logger.error('Database error: %s', e)
                        raise

                except sqlite3.DatabaseError, e:
                    logger.error('Fatal Error executing %s :: %s', query, e)
                    raise

            return sql_result

    def select(self, query, args=None):

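The reworked action() serializes writers behind a module-level lock and retries up to five times when SQLite reports a locked or unopenable database, sleeping one second between attempts; other errors are re-raised immediately. The same retry shape in isolation, as a sketch (database path and query here are made up):

# Illustrative only -- retry a SQLite write while the database is busy.
import sqlite3
import time

def busy_write(path, query, args=(), attempts=5):
    con = sqlite3.connect(path)
    for _ in range(attempts):
        try:
            with con:                      # commits on success, rolls back on error
                return con.execute(query, args)
        except sqlite3.OperationalError as e:
            if 'database is locked' in str(e) or 'unable to open database file' in str(e):
                time.sleep(1)              # give the other writer a chance, then retry
            else:
                raise
    return None
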
@ -26,47 +26,48 @@ class DataFactory(object):
    def __init__(self):
        pass

    def get_history(self, kwargs=None, custom_where=None, grouping=0, watched_percent=85):
        data_tables = datatables.DataTables()

        group_by = ['session_history.reference_id'] if grouping else ['session_history.id']

        columns = ['session_history.reference_id',
                   'session_history.id',
                   'started AS date',
                   'MIN(started) AS started',
                   'MAX(stopped) AS stopped',
                   'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - \
                    SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS duration',
                   'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter',
                   'session_history.user_id',
                   'session_history.user',
                   '(CASE WHEN users.friendly_name IS NULL THEN user ELSE users.friendly_name END) as friendly_name',
                   'player',
                   'ip_address',
                   'session_history_metadata.media_type',
                   'session_history_metadata.rating_key',
                   'session_history_metadata.parent_rating_key',
                   'session_history_metadata.grandparent_rating_key',
                   'session_history_metadata.full_title',
                   'session_history_metadata.parent_title',
                   'session_history_metadata.year',
                   'session_history_metadata.media_index',
                   'session_history_metadata.parent_media_index',
                   'session_history_metadata.thumb',
                   'session_history_metadata.parent_thumb',
                   'session_history_metadata.grandparent_thumb',
                   '((CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) / \
                    (CASE WHEN session_history_metadata.duration IS NULL THEN 1.0 ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete',
                   'session_history_media_info.video_decision',
                   'session_history_media_info.audio_decision',
                   'COUNT(*) AS group_count',
                   'GROUP_CONCAT(session_history.id) AS group_ids'
                   ]
        try:
            query = data_tables.ssp_query(table_name='session_history',
                                          columns=columns,
                                          custom_where=custom_where,
                                          group_by=group_by,
                                          join_types=['LEFT OUTER JOIN',
                                                      'JOIN',
                                                      'JOIN'],

@ -86,7 +87,7 @@ class DataFactory(object):
                     'error': 'Unable to execute database query.'}

        history = query['result']

        rows = []
        for item in history:
            if item["media_type"] == 'episode' and item["parent_thumb"]:

@ -96,33 +97,44 @@ class DataFactory(object):
            else:
                thumb = item["thumb"]

            if item['percent_complete'] >= watched_percent:
                watched_status = 1
            elif item['percent_complete'] >= watched_percent/2:
                watched_status = 0.5
            else:
                watched_status = 0

            row = {"reference_id": item["reference_id"],
                   "id": item["id"],
                   "date": item["date"],
                   "started": item["started"],
                   "stopped": item["stopped"],
                   "duration": item["duration"],
                   "paused_counter": item["paused_counter"],
                   "user_id": item["user_id"],
                   "user": item["user"],
                   "friendly_name": item["friendly_name"],
                   "player": item["player"],
                   "ip_address": item["ip_address"],
                   "media_type": item["media_type"],
                   "rating_key": item["rating_key"],
                   "parent_rating_key": item["parent_rating_key"],
                   "grandparent_rating_key": item["grandparent_rating_key"],
                   "full_title": item["full_title"],
                   "parent_title": item["parent_title"],
                   "year": item["year"],
                   "media_index": item["media_index"],
                   "parent_media_index": item["parent_media_index"],
                   "thumb": thumb,
                   "video_decision": item["video_decision"],
                   "audio_decision": item["audio_decision"],
                   "watched_status": watched_status,
                   "group_count": item["group_count"],
                   "group_ids": item["group_ids"]
                   }

            rows.append(row)

        dict = {'recordsFiltered': query['filteredCount'],
                'recordsTotal': query['totalCount'],
                'data': rows,

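percent_complete is computed in SQL from view_offset against the item duration, and watched_status then buckets it: 1 at or above the watched threshold, 0.5 at or above half of it, and 0 otherwise. A sketch of the same bucketing in plain Python (the default threshold mirrors NOTIFY_WATCHED_PERCENT; the function name is illustrative):

# Illustrative only -- bucket a play into watched / half-watched / unwatched.
def watched_status(view_offset_ms, duration_ms, watched_percent=85):
    percent_complete = (float(view_offset_ms or 0.1) / float(duration_ms or 1.0)) * 100
    if percent_complete >= watched_percent:
        return 1
    elif percent_complete >= watched_percent / 2.0:
        return 0.5
    return 0
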
@ -490,11 +502,17 @@ class DataFactory(object):
            return None

        for item in result:
            # Rename Mystery platform names
            platform_names = {'Mystery 3': 'Playstation 3',
                              'Mystery 4': 'Playstation 4',
                              'Mystery 5': 'Xbox 360'}
            platform_type = platform_names.get(item[0], item[0])

            row = {'platform': item[0],
                   'total_plays': item[1],
                   'total_duration': item[2],
                   'last_play': item[3],
                   'platform_type': platform_type,
                   'title': '',
                   'thumb': '',
                   'grandparent_thumb': '',

@ -778,3 +796,203 @@ class DataFactory(object):
            return 'Deleted all items for user_id %s.' % user_id
        else:
            return 'Unable to delete items. Input user_id not valid.'

    def get_search_query(self, rating_key=''):
        monitor_db = database.MonitorDatabase()

        if rating_key:
            query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \
                    'media_index, parent_media_index, year, media_type ' \
                    'FROM session_history_metadata ' \
                    'WHERE rating_key = ? ' \
                    'OR parent_rating_key = ? ' \
                    'OR grandparent_rating_key = ? ' \
                    'LIMIT 1'
            result = monitor_db.select(query=query, args=[rating_key, rating_key, rating_key])
        else:
            result = []

        query = {}
        query_string = None
        media_type = None

        for item in result:
            title = item['title']
            parent_title = item['parent_title']
            grandparent_title = item['grandparent_title']
            media_index = item['media_index']
            parent_media_index = item['parent_media_index']
            year = item['year']

            if str(item['rating_key']) == rating_key:
                query_string = item['title']
                media_type = item['media_type']

            elif str(item['parent_rating_key']) == rating_key:
                if item['media_type'] == 'episode':
                    query_string = item['grandparent_title']
                    media_type = 'season'
                elif item['media_type'] == 'track':
                    query_string = item['parent_title']
                    media_type = 'album'

            elif str(item['grandparent_rating_key']) == rating_key:
                if item['media_type'] == 'episode':
                    query_string = item['grandparent_title']
                    media_type = 'show'
                elif item['media_type'] == 'track':
                    query_string = item['grandparent_title']
                    media_type = 'artist'

        if query_string and media_type:
            query = {'query_string': query_string.replace('"', ''),
                     'title': title,
                     'parent_title': parent_title,
                     'grandparent_title': grandparent_title,
                     'media_index': media_index,
                     'parent_media_index': parent_media_index,
                     'year': year,
                     'media_type': media_type,
                     'rating_key': rating_key
                     }
        else:
            return None

        return query

    def get_rating_keys_list(self, rating_key='', media_type=''):
        monitor_db = database.MonitorDatabase()

        if media_type == 'movie':
            key_list = {0: {'rating_key': int(rating_key)}}
            return key_list

        if media_type == 'artist' or media_type == 'album' or media_type == 'track':
            match_type = 'title'
        else:
            match_type = 'index'

        # Get the grandparent rating key
        try:
            query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key ' \
                    'FROM session_history_metadata ' \
                    'WHERE rating_key = ? ' \
                    'OR parent_rating_key = ? ' \
                    'OR grandparent_rating_key = ? ' \
                    'LIMIT 1'
            result = monitor_db.select(query=query, args=[rating_key, rating_key, rating_key])

            grandparent_rating_key = result[0]['grandparent_rating_key']

        except:
            logger.warn("Unable to execute database query.")
            return {}

        query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \
                'media_index, parent_media_index ' \
                'FROM session_history_metadata ' \
                'WHERE {0} = ? ' \
                'GROUP BY {1} '

        # get grandparent_rating_keys
        grandparents = {}
        result = monitor_db.select(query=query.format('grandparent_rating_key', 'grandparent_rating_key'),
                                   args=[grandparent_rating_key])
        for item in result:
            # get parent_rating_keys
            parents = {}
            result = monitor_db.select(query=query.format('grandparent_rating_key', 'parent_rating_key'),
                                       args=[item['grandparent_rating_key']])
            for item in result:
                # get rating_keys
                children = {}
                result = monitor_db.select(query=query.format('parent_rating_key', 'rating_key'),
                                           args=[item['parent_rating_key']])
                for item in result:
                    key = item['media_index']
                    children.update({key: {'rating_key': item['rating_key']}})

                key = item['parent_media_index'] if match_type == 'index' else item['parent_title']
                parents.update({key:
                                {'rating_key': item['parent_rating_key'],
                                 'children': children}
                                })

            key = 0 if match_type == 'index' else item['grandparent_title']
            grandparents.update({key:
                                 {'rating_key': item['grandparent_rating_key'],
                                  'children': parents}
                                 })

        key_list = grandparents

        return key_list

    def update_rating_key(self, old_key_list='', new_key_list='', media_type=''):
        monitor_db = database.MonitorDatabase()

        # function to map rating keys pairs
        def get_pairs(old, new):
            pairs = {}
            for k, v in old.iteritems():
                if k in new:
                    if v['rating_key'] != new[k]['rating_key']:
                        pairs.update({v['rating_key']: new[k]['rating_key']})
                    if 'children' in old[k]:
                        pairs.update(get_pairs(old[k]['children'], new[k]['children']))

            return pairs

        # map rating keys pairs
        mapping = {}
        if old_key_list and new_key_list:
            mapping = get_pairs(old_key_list, new_key_list)

        if mapping:
            logger.info(u"PlexPy DataFactory :: Updating rating keys in the database.")
            for old_key, new_key in mapping.iteritems():
                # check rating_key (3 tables)
                monitor_db.action('UPDATE session_history SET rating_key = ? WHERE rating_key = ?',
                                  [new_key, old_key])
                monitor_db.action('UPDATE session_history_media_info SET rating_key = ? WHERE rating_key = ?',
                                  [new_key, old_key])
                monitor_db.action('UPDATE session_history_metadata SET rating_key = ? WHERE rating_key = ?',
                                  [new_key, old_key])

                # check parent_rating_key (2 tables)
                monitor_db.action('UPDATE session_history SET parent_rating_key = ? WHERE parent_rating_key = ?',
                                  [new_key, old_key])
                monitor_db.action('UPDATE session_history_metadata SET parent_rating_key = ? WHERE parent_rating_key = ?',
                                  [new_key, old_key])

                # check grandparent_rating_key (2 tables)
                monitor_db.action('UPDATE session_history SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?',
                                  [new_key, old_key])
                monitor_db.action('UPDATE session_history_metadata SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?',
                                  [new_key, old_key])

                # check thumb (1 table)
                monitor_db.action('UPDATE session_history_metadata SET thumb = replace(thumb, ?, ?) \
                                  WHERE thumb LIKE "/library/metadata/%s/thumb/%%"' % old_key,
                                  [old_key, new_key])

                # check parent_thumb (1 table)
                monitor_db.action('UPDATE session_history_metadata SET parent_thumb = replace(parent_thumb, ?, ?) \
                                  WHERE parent_thumb LIKE "/library/metadata/%s/thumb/%%"' % old_key,
                                  [old_key, new_key])

                # check grandparent_thumb (1 table)
                monitor_db.action('UPDATE session_history_metadata SET grandparent_thumb = replace(grandparent_thumb, ?, ?) \
                                  WHERE grandparent_thumb LIKE "/library/metadata/%s/thumb/%%"' % old_key,
                                  [old_key, new_key])

                # check art (1 table)
                monitor_db.action('UPDATE session_history_metadata SET art = replace(art, ?, ?) \
                                  WHERE art LIKE "/library/metadata/%s/art/%%"' % old_key,
                                  [old_key, new_key])

            return 'Updated rating key in database.'
        else:
            return 'No updated rating key needed in database. No changes were made.'
        # for debugging
        #return mapping

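get_rating_keys_list() returns a nested dict (grandparent -> parent -> child, keyed by title or index), and update_rating_key() walks two such trees with the recursive get_pairs() helper to build an {old_key: new_key} map before rewriting the history tables. A small worked example of that pairing step; the keys and dict literals below are made up for illustration:

# Illustrative only -- how a get_pairs()-style walk matches old and new rating keys.
old = {0: {'rating_key': 100, 'children': {1: {'rating_key': 101}, 2: {'rating_key': 102}}}}
new = {0: {'rating_key': 200, 'children': {1: {'rating_key': 201}, 2: {'rating_key': 102}}}}

def get_pairs(old, new):
    pairs = {}
    for k, v in old.items():
        if k in new:
            if v['rating_key'] != new[k]['rating_key']:
                pairs[v['rating_key']] = new[k]['rating_key']
            if 'children' in v:
                pairs.update(get_pairs(v['children'], new[k].get('children', {})))
    return pairs

print(get_pairs(old, new))   # {100: 200, 101: 201} -- the unchanged key 102 is skipped
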
@ -81,10 +81,11 @@ class HTTPHandler(object):
            logger.warn(u"Failed to access uri endpoint %s with error %s" % (uri, e))
            return None
        except Exception, e:
            logger.warn(u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (uri, e))
            return None
        except:
            logger.warn(u"Failed to access uri endpoint %s with Uncaught exception." % uri)
            return None

        if request_status == 200:
            if output_format == 'dict':

@ -1,412 +0,0 @@
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.

from plexpy import logger, pmsconnect, notification_handler, log_reader, common, database, helpers

import threading
import plexpy
import re
import time

monitor_lock = threading.Lock()

def check_active_sessions():

    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()
        monitor_db = database.MonitorDatabase()
        monitor_process = MonitorProcessing()
        # logger.debug(u"PlexPy Monitor :: Checking for active streams.")

        if session_list:
            media_container = session_list['sessions']

            # Check our temp table for what we must do with the new streams
            db_streams = monitor_db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, '
                                           'grandparent_title, user_id, user, friendly_name, ip_address, player, '
                                           'platform, machine_id, parent_rating_key, grandparent_rating_key, state, '
                                           'view_offset, duration, video_decision, audio_decision, width, height, '
                                           'container, video_codec, audio_codec, bitrate, video_resolution, '
                                           'video_framerate, aspect_ratio, audio_channels, transcode_protocol, '
                                           'transcode_container, transcode_video_codec, transcode_audio_codec, '
                                           'transcode_audio_channels, transcode_width, transcode_height, paused_counter '
                                           'FROM sessions')
            for stream in db_streams:
                if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
                       for d in media_container):
                    # The user's session is still active
                    for session in media_container:
                        if session['session_key'] == str(stream['session_key']) and \
                                session['rating_key'] == str(stream['rating_key']):
                            # The user is still playing the same media item
                            # Here we can check the play states
                            if session['state'] != stream['state']:
                                if session['state'] == 'paused':
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='pause')).start()
                                if session['state'] == 'playing' and stream['state'] == 'paused':
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='resume')).start()
                            if stream['state'] == 'paused':
                                # The stream is still paused so we need to increment the paused_counter
                                # Using the set config parameter as the interval, probably not the most accurate but
                                # it will have to do for now.
                                paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL
                                monitor_db.action('UPDATE sessions SET paused_counter = ? '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [paused_counter, stream['session_key'], stream['rating_key']])
                            if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
                                # The stream is buffering so we need to increment the buffer_count
                                # We're going just increment on every monitor ping,
                                # would be difficult to keep track otherwise
                                monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [stream['session_key'], stream['rating_key']])

                                # Check the current buffer count and last buffer to determine if we should notify
                                buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered '
                                                                  'FROM sessions '
                                                                  'WHERE session_key = ? AND rating_key = ?',
                                                                  [stream['session_key'], stream['rating_key']])

                                if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    # Our first buffer notification
                                    if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
                                        logger.info(u"PlexPy Monitor :: User '%s' has triggered a buffer warning."
                                                    % stream['user'])
                                        # Set the buffer trigger time
                                        monitor_db.action('UPDATE sessions '
                                                          'SET buffer_last_triggered = strftime("%s","now") '
                                                          'WHERE session_key = ? AND rating_key = ?',
                                                          [stream['session_key'], stream['rating_key']])

                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream, notify_action='buffer')).start()
                                    else:
                                        # Subsequent buffer notifications after wait time
                                        if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
                                                plexpy.CONFIG.BUFFER_WAIT:
                                            logger.info(u"PlexPy Monitor :: User '%s' has triggered multiple buffer warnings."
                                                        % stream['user'])
                                            # Set the buffer trigger time
                                            monitor_db.action('UPDATE sessions '
                                                              'SET buffer_last_triggered = strftime("%s","now") '
                                                              'WHERE session_key = ? AND rating_key = ?',
                                                              [stream['session_key'], stream['rating_key']])

                                            threading.Thread(target=notification_handler.notify,
                                                             kwargs=dict(stream_data=stream, notify_action='buffer')).start()

                                logger.debug(u"PlexPy Monitor :: Stream buffering. Count is now %s. Last triggered %s."
                                             % (buffer_values[0][0], buffer_values[0][1]))

                            # Check if the user has reached the offset in the media we defined as the "watched" percent
                            # Don't trigger if state is buffer as some clients push the progress to the end when
                            # buffering on start.
                            if session['progress'] and session['duration'] and session['state'] != 'buffering':
                                if helpers.get_percent(session['progress'],
                                                       session['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='watched')).start()

                else:
                    # The user has stopped playing a stream
                    logger.debug(u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                 % (stream['session_key'], stream['rating_key']))
                    monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
                                      [stream['session_key'], stream['rating_key']])

                    # Check if the user has reached the offset in the media we defined as the "watched" percent
                    if stream['view_offset'] and stream['duration']:
                        if helpers.get_percent(stream['view_offset'],
                                               stream['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                            # Push any notifications -
                            # Push it on it's own thread so we don't hold up our db actions
                            threading.Thread(target=notification_handler.notify,
                                             kwargs=dict(stream_data=stream, notify_action='watched')).start()

                    # Push any notifications - Push it on it's own thread so we don't hold up our db actions
                    threading.Thread(target=notification_handler.notify,
                                     kwargs=dict(stream_data=stream, notify_action='stop')).start()

                    # Write the item history on playback stop
                    monitor_process.write_session_history(session=stream)

            # Process the newly received session data
            for session in media_container:
                monitor_process.write_session(session)
        else:
            logger.debug(u"PlexPy Monitor :: Unable to read session list.")

class MonitorProcessing(object):

    def __init__(self):
        self.db = database.MonitorDatabase()

    def write_session(self, session=None):

        values = {'session_key': session['session_key'],
                  'rating_key': session['rating_key'],
                  'media_type': session['type'],
                  'state': session['state'],
                  'user_id': session['user_id'],
                  'user': session['user'],
                  'machine_id': session['machine_id'],
                  'title': session['title'],
                  'parent_title': session['parent_title'],
                  'grandparent_title': session['grandparent_title'],
                  'friendly_name': session['friendly_name'],
                  'player': session['player'],
                  'platform': session['platform'],
                  'parent_rating_key': session['parent_rating_key'],
                  'grandparent_rating_key': session['grandparent_rating_key'],
                  'view_offset': session['progress'],
                  'duration': session['duration'],
                  'video_decision': session['video_decision'],
                  'audio_decision': session['audio_decision'],
                  'width': session['width'],
                  'height': session['height'],
                  'container': session['container'],
                  'video_codec': session['video_codec'],
                  'audio_codec': session['audio_codec'],
                  'bitrate': session['bitrate'],
                  'video_resolution': session['video_resolution'],
                  'video_framerate': session['video_framerate'],
                  'aspect_ratio': session['aspect_ratio'],
                  'audio_channels': session['audio_channels'],
                  'transcode_protocol': session['transcode_protocol'],
                  'transcode_container': session['transcode_container'],
                  'transcode_video_codec': session['transcode_video_codec'],
                  'transcode_audio_codec': session['transcode_audio_codec'],
                  'transcode_audio_channels': session['transcode_audio_channels'],
                  'transcode_width': session['transcode_width'],
                  'transcode_height': session['transcode_height']
                  }

        keys = {'session_key': session['session_key'],
                'rating_key': session['rating_key']}

        result = self.db.upsert('sessions', values, keys)

        if result == 'insert':
            # Push any notifications - Push it on it's own thread so we don't hold up our db actions
            threading.Thread(target=notification_handler.notify,
                             kwargs=dict(stream_data=values,notify_action='play')).start()

            started = int(time.time())

            # Try and grab IP address from logs
            if plexpy.CONFIG.IP_LOGGING_ENABLE and plexpy.CONFIG.PMS_LOGS_FOLDER:
                ip_address = self.find_session_ip(rating_key=session['rating_key'],
                                                  machine_id=session['machine_id'])
            else:
                ip_address = None

            timestamp = {'started': started,
                         'ip_address': ip_address}

            # If it's our first write then time stamp it.
            self.db.upsert('sessions', timestamp, keys)

    def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
        from plexpy import users

        user_data = users.Users()
        user_details = user_data.get_user_friendly_name(user=session['user'])

        if session:
            logging_enabled = False

            if is_import:
                if str(session['stopped']).isdigit():
                    stopped = int(session['stopped'])
                else:
                    stopped = int(time.time())
            else:
                stopped = int(time.time())

            if plexpy.CONFIG.VIDEO_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                    (session['media_type'] == 'movie' or session['media_type'] == 'episode'):
                logging_enabled = True
            elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                    session['media_type'] == 'track':
                logging_enabled = True
            else:
                logger.debug(u"PlexPy Monitor :: ratingKey %s not logged. Does not meet logging criteria. "
                             u"Media type is '%s'" % (session['rating_key'], session['media_type']))

            if str(session['paused_counter']).isdigit():
                real_play_time = stopped - session['started'] - int(session['paused_counter'])
            else:
                real_play_time = stopped - session['started']

            if plexpy.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import:
                if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                        (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                    logging_enabled = False
                    logger.debug(u"PlexPy Monitor :: Play duration for ratingKey %s is %s secs which is less than %s "
                                 u"seconds, so we're not logging it." %
                                 (session['rating_key'], str(real_play_time), plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
            elif is_import and import_ignore_interval:
                if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                        (real_play_time < int(import_ignore_interval)):
                    logging_enabled = False
                    logger.debug(u"PlexPy Monitor :: Play duration for ratingKey %s is %s secs which is less than %s "
                                 u"seconds, so we're not logging it." %
                                 (session['rating_key'], str(real_play_time),
                                  import_ignore_interval))

            if not user_details['keep_history'] and not is_import:
                logging_enabled = False
                logger.debug(u"PlexPy Monitor :: History logging for user '%s' is disabled." % session['user'])

            if logging_enabled:
                # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history table...")
                query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                        'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                        'platform, machine_id, view_offset) VALUES ' \
                        '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['started'], stopped, session['rating_key'], session['parent_rating_key'],
                        session['grandparent_rating_key'], session['media_type'], session['user_id'], session['user'],
                        session['ip_address'], session['paused_counter'], session['player'], session['platform'],
                        session['machine_id'], session['view_offset']]

                # logger.debug(u"PlexPy Monitor :: Writing session_history transaction...")
                self.db.action(query=query, args=args)

                # logger.debug(u"PlexPy Monitor :: Successfully written history item, last id for session_history is %s"
                # % last_id)

                # Write the session_history_media_info table
                # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_media_info table...")
                query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                        'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                        'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                        'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                        'transcode_height) VALUES ' \
                        '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['rating_key'], session['video_decision'], session['audio_decision'],
                        session['duration'], session['width'], session['height'], session['container'],
                        session['video_codec'], session['audio_codec'], session['bitrate'],
                        session['video_resolution'], session['video_framerate'], session['aspect_ratio'],
                        session['audio_channels'], session['transcode_protocol'], session['transcode_container'],
                        session['transcode_video_codec'], session['transcode_audio_codec'],
                        session['transcode_audio_channels'], session['transcode_width'], session['transcode_height']]

                # logger.debug(u"PlexPy Monitor :: Writing session_history_media_info transaction...")
                self.db.action(query=query, args=args)

                if not is_import:
                    logger.debug(u"PlexPy Monitor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
                    pms_connect = pmsconnect.PmsConnect()
                    result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
                    metadata = result['metadata']
                else:
                    metadata = import_metadata

                # Write the session_history_metadata table
                directors = ";".join(metadata['directors'])
                writers = ";".join(metadata['writers'])
                actors = ";".join(metadata['actors'])
                genres = ";".join(metadata['genres'])

                # Build media item title
                if session['media_type'] == 'episode' or session['media_type'] == 'track':
                    full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
                elif session['media_type'] == 'movie':
                    full_title = metadata['title']
                else:
                    full_title = metadata['title']

                # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_metadata table...")
                query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                        'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                        'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \
                        'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \
                        'tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \
                        '(last_insert_rowid(), ' \
                        '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'],
                        session['title'], session['parent_title'], session['grandparent_title'], full_title,
                        metadata['index'], metadata['parent_index'], metadata['thumb'], metadata['parent_thumb'],
                        metadata['grandparent_thumb'], metadata['art'], session['media_type'], metadata['year'],
                        metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'],
                        metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['tagline'],
                        metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, metadata['studio']]

                # logger.debug(u"PlexPy Monitor :: Writing session_history_metadata transaction...")
                self.db.action(query=query, args=args)

    def find_session_ip(self, rating_key=None, machine_id=None):

        logger.debug(u"PlexPy Monitor :: Requesting log lines...")
        log_lines = log_reader.get_log_tail(window=5000, parsed=False)

        rating_key_line = 'ratingKey=' + rating_key
        rating_key_line_2 = 'metadata%2F' + rating_key
        machine_id_line = 'session=' + machine_id

        for line in reversed(log_lines):
            # We're good if we find a line with both machine id and rating key
            # This is usually when there is a transcode session
            if machine_id_line in line and (rating_key_line in line or rating_key_line_2 in line):
                # Currently only checking for ipv4 addresses
                ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', line)
                if ipv4:
                    # The logged IP will always be the first match and we don't want localhost entries
                    if ipv4[0] != '127.0.0.1':
                        logger.debug(u"PlexPy Monitor :: Matched IP address (%s) for stream ratingKey %s "
                                     u"and machineIdentifier %s."
                                     % (ipv4[0], rating_key, machine_id))
                        return ipv4[0]

        logger.debug(u"PlexPy Monitor :: Unable to find IP address on first pass. "
                     u"Attempting fallback check in 5 seconds...")

        # Wait for the log to catch up and read in new lines
        time.sleep(5)

        logger.debug(u"PlexPy Monitor :: Requesting log lines...")
        log_lines = log_reader.get_log_tail(window=5000, parsed=False)

        for line in reversed(log_lines):
            if 'GET /:/timeline' in line and (rating_key_line in line or rating_key_line_2 in line):
                # Currently only checking for ipv4 addresses
                # This method can return the wrong IP address if more than one user
                # starts watching the same media item around the same time.
                ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', line)
                if ipv4:
                    # The logged IP will always be the first match and we don't want localhost entries
                    if ipv4[0] != '127.0.0.1':
                        logger.debug(u"PlexPy Monitor :: Matched IP address (%s) for stream ratingKey %s." %
                                     (ipv4[0], rating_key))
                        return ipv4[0]

        logger.debug(u"PlexPy Monitor :: Unable to find IP address on fallback search. Not logging IP address.")

        return None

@ -13,7 +13,7 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from plexpy import logger, config, notifiers, database
|
from plexpy import logger, config, notifiers, database, helpers
|
||||||
|
|
||||||
import plexpy
|
import plexpy
|
||||||
import time
|
import time
|
||||||
|
@@ -33,6 +33,8 @@ def notify(stream_data=None, notify_action=None):
        if stream_data['media_type'] == 'movie' or stream_data['media_type'] == 'episode':
            if plexpy.CONFIG.MOVIE_NOTIFY_ENABLE or plexpy.CONFIG.TV_NOTIFY_ENABLE:

+                progress_percent = helpers.get_percent(stream_data['view_offset'], stream_data['duration'])
+
                for agent in notifiers.available_notification_agents():
                    if agent['on_play'] and notify_action == 'play':
                        # Build and send notification
@@ -43,7 +45,8 @@ def notify(stream_data=None, notify_action=None):
                        # Set the notification state in the db
                        set_notify_state(session=stream_data, state='play', agent_info=agent)

-                    elif agent['on_stop'] and notify_action == 'stop':
+                    elif agent['on_stop'] and notify_action == 'stop' \
+                        and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < plexpy.CONFIG.NOTIFY_WATCHED_PERCENT):
                        # Build and send notification
                        notify_strings = build_notify_text(session=stream_data, state=notify_action)
                        notifiers.send_notification(config_id=agent['id'],
@@ -52,7 +55,8 @@ def notify(stream_data=None, notify_action=None):

                        set_notify_state(session=stream_data, state='stop', agent_info=agent)

-                    elif agent['on_pause'] and notify_action == 'pause':
+                    elif agent['on_pause'] and notify_action == 'pause' \
+                        and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99):
                        # Build and send notification
                        notify_strings = build_notify_text(session=stream_data, state=notify_action)
                        notifiers.send_notification(config_id=agent['id'],
@@ -61,7 +65,8 @@ def notify(stream_data=None, notify_action=None):

                        set_notify_state(session=stream_data, state='pause', agent_info=agent)

-                    elif agent['on_resume'] and notify_action == 'resume':
+                    elif agent['on_resume'] and notify_action == 'resume' \
+                        and (plexpy.CONFIG.NOTIFY_CONSECUTIVE or progress_percent < 99):
                        # Build and send notification
                        notify_strings = build_notify_text(session=stream_data, state=notify_action)
                        notifiers.send_notification(config_id=agent['id'],
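Note: the stop/pause/resume branches above now skip the notification once playback is effectively finished, unless consecutive notifications are enabled. A rough sketch of that gate, with a stand-in for helpers.get_percent() and illustrative config values:

    def get_percent(part, whole):
        # Stand-in for helpers.get_percent(); assumed to return part/whole as a percentage
        return round(float(part) / float(whole) * 100, 2) if whole else 0

    NOTIFY_CONSECUTIVE = False    # illustrative value of plexpy.CONFIG.NOTIFY_CONSECUTIVE
    NOTIFY_WATCHED_PERCENT = 85   # illustrative value of plexpy.CONFIG.NOTIFY_WATCHED_PERCENT

    view_offset, duration = 540000, 600000            # 9 of 10 minutes played (ms)
    progress_percent = get_percent(view_offset, duration)   # 90.0

    # A 'stop' notification is only sent while the item is not yet considered watched
    send_stop_notification = NOTIFY_CONSECUTIVE or progress_percent < NOTIFY_WATCHED_PERCENT
    print(send_stop_notification)  # -> False
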
@@ -1,4 +1,4 @@
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -385,16 +385,16 @@ class GROWL(object):
        self.notify('ZOMG Lazors Pewpewpew!', 'Test Message')

    def return_config_options(self):
-        config_option = [{'label': 'Host',
+        config_option = [{'label': 'Growl Host',
                          'value': self.host,
                          'name': 'growl_host',
-                          'description': 'Set the hostname.',
+                          'description': 'Your Growl hostname.',
                          'input_type': 'text'
                          },
-                         {'label': 'Password',
+                         {'label': 'Growl Password',
                          'value': self.password,
                          'name': 'growl_password',
-                          'description': 'Set the password.',
+                          'description': 'Your Growl password.',
                          'input_type': 'password'
                          }
                         ]
@@ -458,16 +458,16 @@ class PROWL(object):
        self.notify('ZOMG Lazors Pewpewpew!', 'Test Message')

    def return_config_options(self):
-        config_option = [{'label': 'API Key',
+        config_option = [{'label': 'Prowl API Key',
                          'value': self.keys,
                          'name': 'prowl_keys',
-                          'description': 'Set the API key.',
+                          'description': 'Your Prowl API key.',
                          'input_type': 'text'
                          },
-                         {'label': 'Priority (-2,-1,0,1 or 2)',
+                         {'label': 'Priority',
                          'value': self.priority,
                          'name': 'prowl_priority',
-                          'description': 'Set the priority.',
+                          'description': 'Set the priority (-2,-1,0,1 or 2).',
                          'input_type': 'number'
                          }
                         ]
@@ -542,19 +542,19 @@ class XBMC(object):
        config_option = [{'label': 'XBMC Host:Port',
                          'value': self.hosts,
                          'name': 'xbmc_host',
-                          'description': 'e.g. http://localhost:8080. Separate hosts with commas.',
+                          'description': 'Host running XBMC (e.g. http://localhost:8080). Separate multiple hosts with commas.',
                          'input_type': 'text'
                          },
-                         {'label': 'Username',
+                         {'label': 'XBMC Username',
                          'value': self.username,
                          'name': 'xbmc_username',
-                          'description': 'Set the Username.',
+                          'description': 'Your XBMC username.',
                          'input_type': 'text'
                          },
-                         {'label': 'Password',
+                         {'label': 'XBMC Password',
                          'value': self.password,
                          'name': 'xbmc_password',
-                          'description': 'Set the Password.',
+                          'description': 'Your XMBC password.',
                          'input_type': 'password'
                          }
                         ]
@@ -689,13 +689,13 @@ class NMA(object):
        config_option = [{'label': 'NotifyMyAndroid API Key',
                          'value': plexpy.CONFIG.NMA_APIKEY,
                          'name': 'nma_apikey',
-                          'description': 'Separate multiple api keys with commas.',
+                          'description': 'Your NotifyMyAndroid API key. Separate multiple api keys with commas.',
                          'input_type': 'text'
                          },
                         {'label': 'Priority',
                          'value': plexpy.CONFIG.NMA_PRIORITY,
                          'name': 'nma_priority',
-                          'description': 'Priority (-2,-1,0,1 or 2).',
+                          'description': 'Set the priority (-2,-1,0,1 or 2).',
                          'input_type': 'number'
                          }
                         ]
@@ -761,7 +761,7 @@ class PUSHBULLET(object):
        self.notify('Main Screen Activate', 'Test Message')

    def return_config_options(self):
-        config_option = [{'label': 'API Key',
+        config_option = [{'label': 'Pushbullet API Key',
                          'value': self.apikey,
                          'name': 'pushbullet_apikey',
                          'description': 'Your Pushbullet API key.',
@@ -829,7 +829,7 @@ class PUSHALOT(object):
            return False

    def return_config_options(self):
-        config_option = [{'label': 'API Key',
+        config_option = [{'label': 'Pushalot API Key',
                          'value': plexpy.CONFIG.PUSHALOT_APIKEY,
                          'name': 'pushalot_apikey',
                          'description': 'Your Pushalot API key.',
@@ -901,7 +901,7 @@ class PUSHOVER(object):
        self.notify('Main Screen Activate', 'Test Message')

    def return_config_options(self):
-        config_option = [{'label': 'API Key',
+        config_option = [{'label': 'Pushover API Key',
                          'value': self.keys,
                          'name': 'pushover_keys',
                          'description': 'Your Pushover API key.',
@@ -910,13 +910,13 @@ class PUSHOVER(object):
                         {'label': 'Priority',
                          'value': self.priority,
                          'name': 'pushover_priority',
-                          'description': 'Priority (-1,0, or 1).',
+                          'description': 'Set the priority (-2,-1,0,1 or 2).',
                          'input_type': 'number'
                          },
-                         {'label': 'API Token',
+                         {'label': 'Pushover API Token',
                          'value': plexpy.CONFIG.PUSHOVER_APITOKEN,
                          'name': 'pushover_apitoken',
-                          'description': 'Leave blank to use PlexPy default.',
+                          'description': 'Your Pushover API toekn. Leave blank to use PlexPy default.',
                          'input_type': 'text'
                          }
                         ]
@@ -1106,6 +1106,7 @@ class OSX_NOTIFY(object):

            notification_center = NSUserNotificationCenter.defaultUserNotificationCenter()
            notification_center.deliverNotification_(notification)
+            logger.info(u"OSX Notify notifications sent.")

            del pool
            return True
@@ -1160,7 +1161,7 @@ class BOXCAR(object):
            return False

    def return_config_options(self):
-        config_option = [{'label': 'Access Token',
+        config_option = [{'label': 'Boxcar Access Token',
                          'value': plexpy.CONFIG.BOXCAR_TOKEN,
                          'name': 'boxcar_token',
                          'description': 'Your Boxcar access token.',
@@ -1215,7 +1216,7 @@ class Email(object):
                         {'label': 'To',
                          'value': plexpy.CONFIG.EMAIL_TO,
                          'name': 'email_to',
-                          'description': 'Who should the recipeint be.',
+                          'description': 'Who should the recipient be.',
                          'input_type': 'text'
                          },
                         {'label': 'SMTP Server',
@@ -15,7 +15,7 @@

import sqlite3

-from plexpy import logger, helpers, monitor, users, plextv
+from plexpy import logger, helpers, activity_pinger, activity_processor, users, plextv
from xml.dom import minidom

import plexpy
@@ -245,9 +245,10 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval
    logger.debug(u"PlexPy Importer :: PlexWatch data import in progress...")

    logger.debug(u"PlexPy Importer :: Disabling monitoring while import in progress.")
-    plexpy.schedule_job(monitor.check_active_sessions, 'Check for active sessions', hours=0, minutes=0, seconds=0)
+    plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
+                        hours=0, minutes=0, seconds=0)

-    monitor_processing = monitor.MonitorProcessing()
+    ap = activity_processor.ActivityProcessor()
    user_data = users.Users()

    # Get the latest friends list so we can pull user id's
@@ -373,10 +374,10 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval
                # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
                # Just make sure that the ratingKey is indeed an integer
                if session_history_metadata['rating_key'].isdigit():
-                    monitor_processing.write_session_history(session=session_history,
+                    ap.write_session_history(session=session_history,
                                             import_metadata=session_history_metadata,
                                             is_import=True,
                                             import_ignore_interval=import_ignore_interval)
                else:
                    logger.debug(u"PlexPy Importer :: Item has bad rating_key: %s" % session_history_metadata['rating_key'])
@@ -17,6 +17,7 @@ from plexpy import logger, helpers, users, http_handler
from urlparse import urlparse

import plexpy
+import urllib2


class PmsConnect(object):
@@ -72,6 +73,23 @@ class PmsConnect(object):

        return request

+    """
+    Return metadata for children of the request item.
+
+    Parameters required: rating_key { Plex ratingKey }
+    Optional parameters: output_format { dict, json }
+
+    Output: array
+    """
+    def get_metadata_children(self, rating_key='', output_format=''):
+        uri = '/library/metadata/' + rating_key + '/children'
+        request = self.request_handler.make_request(uri=uri,
+                                                    proto=self.protocol,
+                                                    request_type='GET',
+                                                    output_format=output_format)
+
+        return request
+
    """
    Return list of recently added items.

@@ -219,6 +237,22 @@ class PmsConnect(object):

        return request

+    """
+    Return search results.
+
+    Optional parameters: output_format { dict, json }
+
+    Output: array
+    """
+    def get_search(self, query='', track='', output_format=''):
+        uri = '/search?query=' + urllib2.quote(query.encode('utf8')) + track
+        request = self.request_handler.make_request(uri=uri,
+                                                    proto=self.protocol,
+                                                    request_type='GET',
+                                                    output_format=output_format)
+
+        return request
+
    """
    Return processed and validated list of recently added items.

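Note: get_search() above only URL-encodes the query into the PMS /search endpoint and optionally appends a type filter; a quick illustration (Python 2, matching the module's use of urllib2; the query string is made up):

    import urllib2

    query = u'game of thrones'
    track = '&type=10'  # the track-only filter that get_search_results() passes for music

    uri = '/search?query=' + urllib2.quote(query.encode('utf8')) + track
    print(uri)  # -> /search?query=game%20of%20thrones&type=10
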
@@ -705,9 +739,9 @@
                                  'transcode_container': transcode_container,
                                  'transcode_protocol': transcode_protocol,
                                  'duration': duration,
-                                  'progress': progress,
+                                  'view_offset': progress,
                                  'progress_percent': str(helpers.get_percent(progress, duration)),
-                                  'type': 'track',
+                                  'media_type': 'track',
                                  'indexes': 0
                                  }
@@ -826,14 +860,14 @@
                                  'transcode_container': transcode_container,
                                  'transcode_protocol': transcode_protocol,
                                  'duration': duration,
-                                  'progress': progress,
+                                  'view_offset': progress,
                                  'progress_percent': str(helpers.get_percent(progress, duration)),
                                  'indexes': use_indexes
                                  }
                if helpers.get_xml_attr(session, 'ratingKey').isdigit():
-                    session_output['type'] = helpers.get_xml_attr(session, 'type')
+                    session_output['media_type'] = helpers.get_xml_attr(session, 'type')
                else:
-                    session_output['type'] = 'clip'
+                    session_output['media_type'] = 'clip'

            elif helpers.get_xml_attr(session, 'type') == 'movie':
                session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
@@ -882,14 +916,14 @@
                                  'transcode_container': transcode_container,
                                  'transcode_protocol': transcode_protocol,
                                  'duration': duration,
-                                  'progress': progress,
+                                  'view_offset': progress,
                                  'progress_percent': str(helpers.get_percent(progress, duration)),
                                  'indexes': use_indexes
                                  }
                if helpers.get_xml_attr(session, 'ratingKey').isdigit():
-                    session_output['type'] = helpers.get_xml_attr(session, 'type')
+                    session_output['media_type'] = helpers.get_xml_attr(session, 'type')
                else:
-                    session_output['type'] = 'clip'
+                    session_output['media_type'] = 'clip'

            elif helpers.get_xml_attr(session, 'type') == 'clip':
                session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
@@ -938,9 +972,9 @@
                                  'transcode_container': transcode_container,
                                  'transcode_protocol': transcode_protocol,
                                  'duration': duration,
-                                  'progress': progress,
+                                  'view_offset': progress,
                                  'progress_percent': str(helpers.get_percent(progress, duration)),
-                                  'type': helpers.get_xml_attr(session, 'type'),
+                                  'media_type': helpers.get_xml_attr(session, 'type'),
                                  'indexes': 0
                                  }
@@ -1027,15 +1061,22 @@
                                  'transcode_container': transcode_container,
                                  'transcode_protocol': transcode_protocol,
                                  'duration': '',
-                                  'progress': '',
+                                  'view_offset': '',
                                  'progress_percent': '100',
-                                  'type': 'photo',
+                                  'media_type': 'photo',
                                  'indexes': 0
                                  }

        else:
            logger.warn(u"No known stream types found in session list.")

+        # Rename Mystery platform names
+        platform_names = {'Mystery 3': 'Playstation 3',
+                          'Mystery 4': 'Playstation 4',
+                          'Mystery 5': 'Xbox 360'}
+        session_output['platform'] = platform_names.get(session_output['platform'],
+                                                        session_output['platform'])
+
        return session_output

    """
@@ -1083,7 +1124,6 @@
                               }
            children_list.append(children_output)

-
        output = {'children_count': helpers.get_xml_attr(xml_head[0], 'size'),
                  'children_type': helpers.get_xml_attr(xml_head[0], 'viewGroup'),
                  'title': helpers.get_xml_attr(xml_head[0], 'title2'),
@@ -1341,3 +1381,202 @@
        else:
            logger.error("Image proxy queries but no input received.")
            return None
+
+    """
+    Return processed list of search results.
+
+    Output: array
+    """
+    def get_search_results(self, query=''):
+        search_results = self.get_search(query=query, output_format='xml')
+        search_results_tracks = self.get_search(query=query, track='&type=10', output_format='xml')
+
+        xml_head = []
+        try:
+            try:
+                xml_head += search_results.getElementsByTagName('MediaContainer')
+            except:
+                pass
+            try:
+                xml_head += search_results_tracks.getElementsByTagName('MediaContainer')
+            except:
+                pass
+        except:
+            logger.warn("Unable to parse XML for get_search_result_details.")
+            return []
+
+        search_results_count = 0
+        search_results_list = {'movie': [],
+                               'show': [],
+                               'season': [],
+                               'episode': [],
+                               'artist': [],
+                               'album': [],
+                               'track': []
+                               }
+
+        totalSize = 0
+        for a in xml_head:
+            if a.getAttribute('size'):
+                totalSize += int(a.getAttribute('size'))
+        if totalSize == 0:
+            logger.debug(u"No search results.")
+            search_results_list = {'results_count': search_results_count,
+                                   'results_list': []
+                                   }
+            return search_results_list
+
+        for a in xml_head:
+            if a.getElementsByTagName('Video'):
+                result_data = a.getElementsByTagName('Video')
+                for result in result_data:
+                    rating_key = helpers.get_xml_attr(result, 'ratingKey')
+                    metadata = self.get_metadata_details(rating_key=rating_key)
+                    if metadata['metadata']['type'] == 'movie':
+                        search_results_list['movie'].append(metadata['metadata'])
+                    elif metadata['metadata']['type'] == 'episode':
+                        search_results_list['episode'].append(metadata['metadata'])
+                    search_results_count += 1
+
+            if a.getElementsByTagName('Directory'):
+                result_data = a.getElementsByTagName('Directory')
+                for result in result_data:
+                    rating_key = helpers.get_xml_attr(result, 'ratingKey')
+                    metadata = self.get_metadata_details(rating_key=rating_key)
+                    if metadata['metadata']['type'] == 'show':
+                        search_results_list['show'].append(metadata['metadata'])
+
+                        show_seasons = self.get_item_children(rating_key=metadata['metadata']['rating_key'])
+                        if show_seasons['children_count'] != '0':
+                            for season in show_seasons['children_list']:
+                                if season['rating_key']:
+                                    rating_key = season['rating_key']
+                                    metadata = self.get_metadata_details(rating_key=rating_key)
+                                    search_results_list['season'].append(metadata['metadata'])
+                                    search_results_count += 1
+
+                    elif metadata['metadata']['type'] == 'artist':
+                        search_results_list['artist'].append(metadata['metadata'])
+                    elif metadata['metadata']['type'] == 'album':
+                        search_results_list['album'].append(metadata['metadata'])
+                    search_results_count += 1
+
+            if a.getElementsByTagName('Track'):
+                result_data = a.getElementsByTagName('Track')
+                for result in result_data:
+                    rating_key = helpers.get_xml_attr(result, 'ratingKey')
+                    metadata = self.get_metadata_details(rating_key=rating_key)
+                    search_results_list['track'].append(metadata['metadata'])
+                    search_results_count += 1
+
+        output = {'results_count': search_results_count,
+                  'results_list': search_results_list
+                  }
+
+        return output
+
+    """
+    Return processed list of grandparent/parent/child rating keys.
+
+    Output: array
+    """
+    def get_rating_keys_list(self, rating_key='', media_type=''):
+
+        if media_type == 'movie':
+            key_list = {0: {'rating_key': int(rating_key)}}
+            return key_list
+
+        if media_type == 'artist' or media_type == 'album' or media_type == 'track':
+            match_type = 'title'
+        else:
+            match_type = 'index'
+
+        # get grandparent rating key
+        if media_type == 'season' or media_type == 'album':
+            try:
+                metadata = self.get_metadata_details(rating_key=rating_key)
+                rating_key = metadata['metadata']['parent_rating_key']
+            except:
+                logger.warn("Unable to get parent_rating_key for get_rating_keys_list.")
+                return {}
+
+        elif media_type == 'episode' or media_type == 'track':
+            try:
+                metadata = self.get_metadata_details(rating_key=rating_key)
+                rating_key = metadata['metadata']['grandparent_rating_key']
+            except:
+                logger.warn("Unable to get grandparent_rating_key for get_rating_keys_list.")
+                return {}
+
+        # get parent_rating_keys
+        metadata = self.get_metadata_children(str(rating_key), output_format='xml')
+
+        try:
+            xml_head = metadata.getElementsByTagName('MediaContainer')
+        except:
+            logger.warn("Unable to parse XML for get_rating_keys_list.")
+            return {}
+
+        for a in xml_head:
+            if a.getAttribute('size'):
+                if a.getAttribute('size') == '0':
+                    return {}
+
+            title = helpers.get_xml_attr(a, 'title2')
+
+            if a.getElementsByTagName('Directory'):
+                parents_metadata = a.getElementsByTagName('Directory')
+            else:
+                parents_metadata = []
+
+            parents = {}
+            for item in parents_metadata:
+                parent_rating_key = helpers.get_xml_attr(item, 'ratingKey')
+                parent_index = helpers.get_xml_attr(item, 'index')
+                parent_title = helpers.get_xml_attr(item, 'title')
+
+                if parent_rating_key:
+                    # get rating_keys
+                    metadata = self.get_metadata_children(str(parent_rating_key), output_format='xml')
+
+                    try:
+                        xml_head = metadata.getElementsByTagName('MediaContainer')
+                    except:
+                        logger.warn("Unable to parse XML for get_rating_keys_list.")
+                        return {}
+
+                    for a in xml_head:
+                        if a.getAttribute('size'):
+                            if a.getAttribute('size') == '0':
+                                return {}
+
+                        if a.getElementsByTagName('Video'):
+                            children_metadata = a.getElementsByTagName('Video')
+                        elif a.getElementsByTagName('Track'):
+                            children_metadata = a.getElementsByTagName('Track')
+                        else:
+                            children_metadata = []
+
+                        children = {}
+                        for item in children_metadata:
+                            child_rating_key = helpers.get_xml_attr(item, 'ratingKey')
+                            child_index = helpers.get_xml_attr(item, 'index')
+                            child_title = helpers.get_xml_attr(item, 'title')
+
+                            if child_rating_key:
+                                key = int(child_index)
+                                children.update({key: {'rating_key': int(child_rating_key)}})
+
+                    key = int(parent_index) if match_type == 'index' else parent_title
+                    parents.update({key:
+                                    {'rating_key': int(parent_rating_key),
+                                     'children': children}
+                                    })
+
+        key = 0 if match_type == 'index' else title
+        key_list = {key:
+                    {'rating_key': int(rating_key),
+                     'children': parents}
+                    }
+
+        return key_list
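Note: for orientation, the nested mapping get_rating_keys_list() builds for a show looks roughly like the following (rating key values are made up; shows/seasons/episodes are keyed by index, music items by title):

    key_list = {
        0: {'rating_key': 1000,                # the show itself
            'children': {
                1: {'rating_key': 1001,        # season 1
                    'children': {
                        1: {'rating_key': 1002},   # episode 1
                        2: {'rating_key': 1003},   # episode 2
                    }},
            }},
    }
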
@@ -516,8 +516,14 @@ class Users(object):
            return None

        for item in result:
+            # Rename Mystery platform names
+            platform_names = {'Mystery 3': 'Playstation 3',
+                              'Mystery 4': 'Playstation 4',
+                              'Mystery 5': 'Xbox 360'}
+            platform_type = platform_names.get(item[2], item[2])
+
            row = {'platform_name': item[0],
-                   'platform_type': item[2],
+                   'platform_type': platform_type,
                   'total_plays': item[1],
                   'result_id': result_id
                   }
@@ -1,2 +1,2 @@
PLEXPY_VERSION = "master"
-PLEXPY_RELEASE_VERSION = "1.1.10"
+PLEXPY_RELEASE_VERSION = "1.2.0"
142 plexpy/web_socket.py Normal file

@@ -0,0 +1,142 @@
+# This file is part of PlexPy.
+#
+# PlexPy is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# PlexPy is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
+
+# Mostly borrowed from https://github.com/trakt/Plex-Trakt-Scrobbler
+
+from plexpy import logger, activity_pinger
+
+import threading
+import plexpy
+import json
+import time
+import websocket
+
+name = 'websocket'
+opcode_data = (websocket.ABNF.OPCODE_TEXT, websocket.ABNF.OPCODE_BINARY)
+
+
+def start_thread():
+    # Check for any existing sessions on start up
+    activity_pinger.check_active_sessions(ws_request=True)
+    # Start the websocket listener on it's own thread
+    threading.Thread(target=run).start()
+
+
+def run():
+    from websocket import create_connection
+
+    uri = 'ws://%s:%s/:/websockets/notifications' % (
+        plexpy.CONFIG.PMS_IP,
+        plexpy.CONFIG.PMS_PORT
+    )
+
+    # Set authentication token (if one is available)
+    if plexpy.CONFIG.PMS_TOKEN:
+        uri += '?X-Plex-Token=' + plexpy.CONFIG.PMS_TOKEN
+
+    ws_connected = False
+    reconnects = 0
+
+    # Try an open the websocket connection - if it fails after 15 retries fallback to polling
+    while not ws_connected and reconnects <= 15:
+        try:
+            logger.info(u'PlexPy WebSocket :: Opening websocket, connection attempt %s.' % str(reconnects + 1))
+            ws = create_connection(uri)
+            reconnects = 0
+            ws_connected = True
+            logger.info(u'PlexPy WebSocket :: Ready')
+        except IOError, e:
+            logger.error(u'PlexPy WebSocket :: %s.' % e)
+            reconnects += 1
+            time.sleep(5)
+
+    while ws_connected:
+        try:
+            process(*receive(ws))
+
+            # successfully received data, reset reconnects counter
+            reconnects = 0
+        except websocket.WebSocketConnectionClosedException:
+            if reconnects <= 15:
+                reconnects += 1
+
+                # Sleep 5 between connection attempts
+                if reconnects > 1:
+                    time.sleep(5)
+
+                logger.warn(u'PlexPy WebSocket :: Connection has closed, reconnecting...')
+                try:
+                    ws = create_connection(uri)
+                except IOError, e:
+                    logger.info(u'PlexPy WebSocket :: %s.' % e)
+
+            else:
+                ws_connected = False
+                break
+
+    if not ws_connected:
+        logger.error(u'PlexPy WebSocket :: Connection unavailable, falling back to polling.')
+        plexpy.POLLING_FAILOVER = True
+        plexpy.initialize_scheduler()
+
+    logger.debug(u'PlexPy WebSocket :: Leaving thread.')
+
+
+def receive(ws):
+    frame = ws.recv_frame()
+
+    if not frame:
+        raise websocket.WebSocketException("Not a valid frame %s" % frame)
+    elif frame.opcode in opcode_data:
+        return frame.opcode, frame.data
+    elif frame.opcode == websocket.ABNF.OPCODE_CLOSE:
+        ws.send_close()
+        return frame.opcode, None
+    elif frame.opcode == websocket.ABNF.OPCODE_PING:
+        ws.pong("Hi!")
+
+    return None, None
+
+
+def process(opcode, data):
+    from plexpy import activity_handler
+
+    if opcode not in opcode_data:
+        return False
+
+    try:
+        info = json.loads(data)
+    except Exception as ex:
+        logger.warn(u'PlexPy WebSocket :: Error decoding message from websocket: %s' % ex)
+        logger.debug(data)
+        return False
+
+    type = info.get('type')
+
+    if not type:
+        return False
+
+    if type == 'playing':
+        # logger.debug('%s.playing %s' % (name, info))
+        try:
+            time_line = info.get('_children')
+        except:
+            logger.debug(u"PlexPy WebSocket :: Session found but unable to get timeline data.")
+            return False
+
+        activity = activity_handler.ActivityHandler(timeline=time_line[0])
+        activity.process()
+
+    return True
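Note: process() above assumes the decoded PMS notification carries a 'type' field and a '_children' list holding the timeline entry. A hypothetical message of that shape (field names and values are illustrative, not taken from Plex documentation):

    import json

    data = '{"type": "playing", "_children": [{"sessionKey": "12", "ratingKey": "4567", "state": "playing"}]}'

    info = json.loads(data)
    time_line = info.get('_children')
    print(time_line[0]['state'])  # -> playing
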
@@ -99,6 +99,7 @@ class WebInterface(object):
        # The setup wizard just refreshes the page on submit so we must redirect to home if config set.
        # Also redirecting to home if a PMS token already exists - will remove this in future.
        if plexpy.CONFIG.FIRST_RUN_COMPLETE or plexpy.CONFIG.PMS_TOKEN:
+            plexpy.initialize_scheduler()
            raise cherrypy.HTTPRedirect("home")
        else:
            return serve_template(templatename="welcome.html", title="Welcome", config=config)
@@ -430,6 +431,7 @@ class WebInterface(object):
                  "movie_notify_on_pause": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_PAUSE),
                  "music_notify_on_pause": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_PAUSE),
                  "monitoring_interval": plexpy.CONFIG.MONITORING_INTERVAL,
+                  "monitoring_use_websocket": checked(plexpy.CONFIG.MONITORING_USE_WEBSOCKET),
                  "refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL,
                  "refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
                  "ip_logging_enable": checked(plexpy.CONFIG.IP_LOGGING_ENABLE),
@@ -437,6 +439,7 @@ class WebInterface(object):
                  "music_logging_enable": checked(plexpy.CONFIG.MUSIC_LOGGING_ENABLE),
                  "logging_ignore_interval": plexpy.CONFIG.LOGGING_IGNORE_INTERVAL,
                  "pms_is_remote": checked(plexpy.CONFIG.PMS_IS_REMOTE),
+                  "notify_consecutive": checked(plexpy.CONFIG.NOTIFY_CONSECUTIVE),
                  "notify_watched_percent": plexpy.CONFIG.NOTIFY_WATCHED_PERCENT,
                  "notify_on_start_subject_text": plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT,
                  "notify_on_start_body_text": plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT,
@@ -456,7 +459,8 @@ class WebInterface(object):
                  "home_stats_cards": plexpy.CONFIG.HOME_STATS_CARDS,
                  "home_library_cards": plexpy.CONFIG.HOME_LIBRARY_CARDS,
                  "buffer_threshold": plexpy.CONFIG.BUFFER_THRESHOLD,
-                  "buffer_wait": plexpy.CONFIG.BUFFER_WAIT
+                  "buffer_wait": plexpy.CONFIG.BUFFER_WAIT,
+                  "group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES)
                  }

        return serve_template(templatename="settings.html", title="Settings", config=config)
@@ -468,11 +472,12 @@ class WebInterface(object):
        checked_configs = [
            "launch_browser", "enable_https", "api_enabled", "freeze_db", "check_github",
            "grouping_global_history", "grouping_user_history", "grouping_charts", "pms_use_bif", "pms_ssl",
-            "tv_notify_enable", "movie_notify_enable", "music_notify_enable",
+            "tv_notify_enable", "movie_notify_enable", "music_notify_enable", "monitoring_use_websocket",
            "tv_notify_on_start", "movie_notify_on_start", "music_notify_on_start",
            "tv_notify_on_stop", "movie_notify_on_stop", "music_notify_on_stop",
            "tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause", "refresh_users_on_startup",
-            "ip_logging_enable", "video_logging_enable", "music_logging_enable", "pms_is_remote", "home_stats_type"
+            "ip_logging_enable", "video_logging_enable", "music_logging_enable", "pms_is_remote", "home_stats_type",
+            "group_history_tables", "notify_consecutive"
        ]
        for checked_config in checked_configs:
            if checked_config not in kwargs:
@@ -553,28 +558,38 @@
                              message=message, timer=timer, quote=quote)

    @cherrypy.expose
-    def get_history(self, user=None, user_id=None, **kwargs):
+    def get_history(self, user=None, user_id=None, grouping=0, **kwargs):
+
+        if grouping == 'false':
+            grouping = 0
+        else:
+            grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES
+
+        watched_percent = plexpy.CONFIG.NOTIFY_WATCHED_PERCENT
+
        custom_where=[]
        if user_id:
-            custom_where = [['user_id', user_id]]
+            custom_where = [['session_history.user_id', user_id]]
        elif user:
-            custom_where = [['user', user]]
+            custom_where = [['session_history.user', user]]
        if 'rating_key' in kwargs:
            rating_key = kwargs.get('rating_key', "")
-            custom_where = [['rating_key', rating_key]]
+            custom_where = [['session_history.rating_key', rating_key]]
        if 'parent_rating_key' in kwargs:
            rating_key = kwargs.get('parent_rating_key', "")
-            custom_where = [['parent_rating_key', rating_key]]
+            custom_where = [['session_history.parent_rating_key', rating_key]]
        if 'grandparent_rating_key' in kwargs:
            rating_key = kwargs.get('grandparent_rating_key', "")
-            custom_where = [['grandparent_rating_key', rating_key]]
+            custom_where = [['session_history.grandparent_rating_key', rating_key]]
        if 'start_date' in kwargs:
            start_date = kwargs.get('start_date', "")
            custom_where = [['strftime("%Y-%m-%d", datetime(date, "unixepoch", "localtime"))', start_date]]
+        if 'reference_id' in kwargs:
+            reference_id = kwargs.get('reference_id', "")
+            custom_where = [['session_history.reference_id', reference_id]]

        data_factory = datafactory.DataFactory()
-        history = data_factory.get_history(kwargs=kwargs, custom_where=custom_where)
+        history = data_factory.get_history(kwargs=kwargs, custom_where=custom_where, grouping=grouping, watched_percent=watched_percent)

        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps(history)
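Note: because get_history() is a CherryPy-exposed handler, the per-request grouping override rides on the query string. A hypothetical request against a local install (host, port and user_id are illustrative only):

    import urllib2

    url = 'http://localhost:8181/get_history?user_id=123456&grouping=false'
    history_json = urllib2.urlopen(url).read()  # the json.dumps(history) payload returned above
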
@@ -747,6 +762,7 @@
    @cherrypy.expose
    def info(self, item_id=None, source=None, **kwargs):
        metadata = None
+        query = None

        config = {
            "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER
@@ -760,12 +776,15 @@
            result = pms_connect.get_metadata_details(rating_key=item_id)
            if result:
                metadata = result['metadata']
+            else:
+                data_factory = datafactory.DataFactory()
+                query = data_factory.get_search_query(rating_key=item_id)

        if metadata:
            return serve_template(templatename="info.html", data=metadata, title="Info", config=config)
        else:
            logger.warn('Unable to retrieve data.')
-            return serve_template(templatename="info.html", data=None, title="Info")
+            return serve_template(templatename="info.html", data=None, query=query, title="Info")

    @cherrypy.expose
    def get_user_recently_watched(self, user=None, user_id=None, limit='10', **kwargs):
@@ -1321,3 +1340,105 @@
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps({'message': 'no data received'})
+
+    @cherrypy.expose
+    def search(self, search_query=''):
+        query = search_query.replace('"', '')
+
+        return serve_template(templatename="search.html", title="Search", query=query)
+
+    @cherrypy.expose
+    def search_results(self, query, **kwargs):
+
+        pms_connect = pmsconnect.PmsConnect()
+        result = pms_connect.get_search_results(query)
+
+        if result:
+            cherrypy.response.headers['Content-type'] = 'application/json'
+            return json.dumps(result)
+        else:
+            logger.warn('Unable to retrieve data.')
+
+    @cherrypy.expose
+    def get_search_results_children(self, query, media_type=None, season_index=None, **kwargs):
+
+        pms_connect = pmsconnect.PmsConnect()
+        result = pms_connect.get_search_results(query)
+
+        if media_type:
+            result['results_list'] = {media_type: result['results_list'][media_type]}
+        if media_type == 'season' and season_index:
+            for season in result['results_list']['season']:
+                if season['index'] == season_index:
+                    result['results_list']['season'] = [season]
+                    break
+
+        if result:
+            return serve_template(templatename="info_search_results_list.html", data=result, title="Search Result List")
+        else:
+            logger.warn('Unable to retrieve data.')
+            return serve_template(templatename="info_search_results_list.html", data=None, title="Search Result List")
+
+    @cherrypy.expose
+    def update_history_rating_key(self, old_rating_key, new_rating_key, media_type, **kwargs):
+        data_factory = datafactory.DataFactory()
+        pms_connect = pmsconnect.PmsConnect()
+
+        old_key_list = data_factory.get_rating_keys_list(rating_key=old_rating_key, media_type=media_type)
+        new_key_list = pms_connect.get_rating_keys_list(rating_key=new_rating_key, media_type=media_type)
+
+        update_db = data_factory.update_rating_key(old_key_list=old_key_list,
+                                                   new_key_list=new_key_list,
+                                                   media_type=media_type)
+
+        if update_db:
+            cherrypy.response.headers['Content-type'] = 'application/json'
+            return json.dumps({'message': update_db})
+        else:
+            cherrypy.response.headers['Content-type'] = 'application/json'
+            return json.dumps({'message': 'no data received'})
+
+
+    # test code
+    @cherrypy.expose
+    def get_new_rating_keys(self, rating_key='', media_type='', **kwargs):
+
+        pms_connect = pmsconnect.PmsConnect()
+        result = pms_connect.get_rating_keys_list(rating_key=rating_key, media_type=media_type)
+
+        if result:
+            cherrypy.response.headers['Content-type'] = 'application/json'
+            return json.dumps(result)
+        else:
+            logger.warn('Unable to retrieve data.')
+
+    @cherrypy.expose
+    def get_old_rating_keys(self, rating_key='', media_type='', **kwargs):
+
+        data_factory = datafactory.DataFactory()
+        result = data_factory.get_rating_keys_list(rating_key=rating_key, media_type=media_type)
+
+        if result:
+            cherrypy.response.headers['Content-type'] = 'application/json'
+            return json.dumps(result)
+        else:
+            logger.warn('Unable to retrieve data.')
+
+    @cherrypy.expose
+    def get_map_rating_keys(self, old_rating_key, new_rating_key, media_type, **kwargs):
+
+        data_factory = datafactory.DataFactory()
+        pms_connect = pmsconnect.PmsConnect()
+
+        if new_rating_key:
+            old_key_list = data_factory.get_rating_keys_list(rating_key=old_rating_key, media_type=media_type)
+            new_key_list = pms_connect.get_rating_keys_list(rating_key=new_rating_key, media_type=media_type)
+
+            result = data_factory.update_rating_key(old_key_list=old_key_list,
+                                                    new_key_list=new_key_list,
+                                                    media_type=media_type)
+
+        if result:
+            cherrypy.response.headers['Content-type'] = 'application/json'
+            return json.dumps(result)
+        else:
+            logger.warn('Unable to retrieve data.')