Mirror of https://github.com/Tautulli/Tautulli.git, synced 2025-07-11 15:56:07 -07:00

Commit 82b567f15f: Fix merge conflicts

50 changed files with 9327 additions and 277 deletions.

CHANGELOG.md (11 changed lines)
@@ -1,5 +1,16 @@
 # Changelog
 
+## v1.1.10 (2015-09-20)
+
+* Added dedicated settings section for home stats configuration with ability to show/hide selected stats and sections.
+* Added support for Twitter notifications.
+* Only show music in graphs if music logging is enabled.
+* The monitoring ignore interval now excludes paused time.
+* Fix display bug on activity panel which incorrectly reported transcoding sometimes.
+* Fix bug with Email notification TLS checkbox when it would be disabled by changing any other settings afterwards.
+* Fix issue on some Python releases where the webbrowser library isn't included.
+
+
 ## v1.1.9 (2015-09-14)
 
 * Another JonnyWong release. I'm going to stop thanking you now ;)
PlexPy.py (12 changed lines)

@@ -20,7 +20,7 @@ import sys
 # Ensure lib added to path, before any other imports
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'lib/'))
 
-from plexpy import webstart, logger
+from plexpy import webstart, logger, web_socket
 
 import locale
 import time

@@ -191,6 +191,16 @@ def main():
     # Start the background threads
     plexpy.start()
 
+    # Open connection for websocket
+    if plexpy.CONFIG.MONITORING_USE_WEBSOCKET:
+        try:
+            web_socket.start_thread()
+        except:
+            logger.warn(u"Websocket :: Unable to open connection.")
+            # Fallback to polling
+            plexpy.POLLING_FAILOVER = True
+            plexpy.initialize_scheduler()
+
     # Open webbrowser
     if plexpy.CONFIG.LAUNCH_BROWSER and not args.nolaunch:
         plexpy.launch_browser(plexpy.CONFIG.HTTP_HOST, http_port,
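The hunk above wires the websocket listener into startup and falls back to polling if it cannot connect. A minimal sketch of the same pattern outside PlexPy, where start_websocket, start_polling and the config key are illustrative stand-ins rather than PlexPy APIs:

    import logging

    logger = logging.getLogger(__name__)

    def start_monitoring(config, start_websocket, start_polling):
        """Prefer the websocket listener; fall back to polling if it fails.

        config, start_websocket and start_polling are illustrative stand-ins.
        """
        if config.get("use_websocket"):
            try:
                start_websocket()
                return "websocket"
            except Exception:
                logger.warning("Websocket :: Unable to open connection, falling back to polling.")
        start_polling()
        return "polling"

Keeping the failover decision in one place like this makes the "websocket preferred, polling as backup" behaviour easy to test in isolation.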
@@ -37,7 +37,7 @@ from plexpy import version
 % elif plexpy.CONFIG.CHECK_GITHUB and plexpy.CURRENT_VERSION != plexpy.LATEST_VERSION and plexpy.COMMITS_BEHIND > 0 and plexpy.INSTALL_TYPE != 'win':
     <div id="updatebar" style="display: none;">
         A <a
-            href="https://github.com/${plexpy.CONFIG.GIT_USER}/plexpy/compare/${plexpy.CURRENT_VERSION}...${plexpy.LATEST_VERSION}">
+            href="https://github.com/${plexpy.CONFIG.GIT_USER}/plexpy/compare/${plexpy.CURRENT_VERSION}...${plexpy.LATEST_VERSION}" target="_blank">
         newer version</a> is available. You're ${plexpy.COMMITS_BEHIND} commits behind. <a href="update">Update</a> or
         <a href="#" id="updateDismiss">Close</a>
     </div>
@@ -33,6 +33,33 @@ select.input-sm {
     color: #999;
     outline: none;
 }
+select[multiple] {
+    height: 125px;
+    margin: 5px 0 5px 0;
+    color: #fff;
+    border: 0px solid #444;
+    background: #555;
+    padding: 2px 2px;
+    background-color: #555;
+    border-radius: 3px;
+    transition: background-color .3s;
+}
+select[multiple]:focus {
+    outline: 0;
+    outline: thin dotted \9;
+    color: #555;
+    background-color: #fff;
+    transition: background-color .3s;
+}
+select[multiple]:focus::-webkit-scrollbar-thumb {
+    background-color: rgba(0,0,0,.15);
+}
+select[multiple] option {
+    padding: 6px 10px;
+    -webkit-border-radius: 2px;
+    -moz-border-radius: 2px;
+    border-radius: 2px;
+}
 img {
     -webkit-box-sizing: content-box;
     -moz-box-sizing: content-box;

@@ -1074,7 +1101,7 @@ a:hover .dashboard-recent-media-cover {
     display: block;
     width: 100%;
     height: 100%;
-    background-image: url(../images/plex-logo-light.svg);
+    background-image: url(../images/plex-logo-light-small.png);
     background-size: 100px;
     background-repeat: no-repeat;
     background-position: center;
@@ -19,7 +19,7 @@ session_key Returns a unique session id for the active stream
 rating_key          Returns the unique identifier for the media item.
 media_index         Returns the index of the media item.
 parent_media_index  Returns the index of the media item's parent.
-type                Returns the type of session. Either 'track', 'episode' or 'movie'.
+media_type          Returns the type of session. Either 'track', 'episode' or 'movie'.
 thumb               Returns the location of the item's thumbnail. Use with pms_image_proxy.
 bif_thumb           Returns the location of the item's bif thumbnail. Use with pms_image_proxy.
 art                 Returns the location of the item's artwork
@@ -67,21 +67,21 @@ DOCUMENTATION :: END
@@ -93,7 +93,7 @@ DOCUMENTATION :: END

These two hunks cover the poster and artwork selection block of each activity card (the dashboard-instance wrapper, the info?item_id link, and the pms_image_proxy poster, cover and bif divs). That markup is unchanged; the only edit is the rename of the session type key in every branch, for example:

-    % if a['type'] == 'movie' or a['type'] == 'episode' or a['type'] == 'track':
+    % if a['media_type'] == 'movie' or a['media_type'] == 'episode' or a['media_type'] == 'track':

The same rename is applied to the % if / % elif branches on 'movie', 'episode', 'track', 'clip' and 'photo' inside the poster block.
@@ -116,9 +116,11 @@ DOCUMENTATION :: END
     State <strong>Buffering</strong>
     % endif
     </div>
-    % if a['type'] == 'track':
+    % if a['media_type'] == 'track':
         % if a['audio_decision'] == 'direct play':
         Stream <strong>Direct Play</strong>
+        % elif a['audio_decision'] == 'copy':
+        Stream <strong>Direct Stream</strong>
         % else:
         Stream <strong>Transcoding
         (Speed: ${a['transcode_speed']})

@@ -135,9 +137,11 @@ DOCUMENTATION :: END
     % elif a['audio_decision'] != 'transcode':
     Audio <strong>Transcode (${a['transcode_audio_codec']}) (${a['transcode_audio_channels']}ch)</strong>
     % endif
-    % elif a['type'] == 'episode' or a['type'] == 'movie' or a['type'] == 'clip':
-        % if a['video_decision'] == 'direct play':
+    % elif a['media_type'] == 'episode' or a['media_type'] == 'movie' or a['media_type'] == 'clip':
+        % if a['video_decision'] == 'direct play' and a['audio_decision'] == 'direct play':
         Stream <strong>Direct Play</strong>
+        % elif a['video_decision'] == 'copy' and a['audio_decision'] == 'copy':
+        Stream <strong>Direct Stream</strong>
         % else:
         Stream <strong>Transcoding
         (Speed: ${a['transcode_speed']})

@@ -162,9 +166,11 @@ DOCUMENTATION :: END
     % elif a['audio_decision'] == 'transcode':
     Audio <strong>Transcode (${a['transcode_audio_codec']}) (${a['transcode_audio_channels']}ch)</strong>
     % endif
-    % elif a['type'] == 'photo':
+    % elif a['media_type'] == 'photo':
         % if a['video_decision'] == 'direct play':
         Stream <strong>Direct Play</strong>
+        % elif a['video_decision'] == 'copy':
+        Stream <strong>Direct Stream</strong>
         % else:
         Stream <strong>
         Transcoding
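These hunks apply one rule in three places: a session is labelled Direct Play only when both the video and audio decisions are direct play, Direct Stream when both are copy, and Transcoding otherwise. A minimal sketch of that rule as a standalone helper (the function and its name are illustrative, not part of the PlexPy code, which inlines the checks per media type):

    def stream_label(video_decision, audio_decision=None):
        """Map Plex transcode decisions to the label shown on the activity panel."""
        # Audio-only sessions (tracks) pass only an audio decision.
        decisions = {video_decision, audio_decision or video_decision}
        if decisions == {"direct play"}:
            return "Direct Play"
        if decisions == {"copy"}:
            return "Direct Stream"
        return "Transcoding"

    # e.g. copied video with transcoded audio is now reported as Transcoding
    assert stream_label("copy", "transcode") == "Transcoding"

Requiring both decisions to agree is what fixes the panel sometimes reporting transcoding incorrectly, as noted in the changelog.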
@@ -178,15 +184,15 @@ DOCUMENTATION :: END
     <br>
     </div>
     </div>
-    % if a['type'] != 'photo':
+    % if a['media_type'] != 'photo':
     <div class="dashboard-activity-poster-info-bar">
     <div class="dashboard-activity-poster-info-time">
-    <span class="progress_time">${a['progress']}</span>/<span class="progress_time">${a['duration']}</span>
+    <span class="progress_time">${a['view_offset']}</span>/<span class="progress_time">${a['duration']}</span>
     </div>
     </div>
     % endif
     </div>
-    % if a['type'] == 'movie' or a['type'] == 'episode' or a['type'] == 'track':
+    % if a['media_type'] == 'movie' or a['media_type'] == 'episode' or a['media_type'] == 'track':
     </a>
     % endif
     <div class="dashboard-activity-progress">

@@ -207,38 +213,38 @@ DOCUMENTATION :: END

This hunk updates the metadata block below each poster. Every branch on a['type'] becomes a['media_type'], and the title, subtitle and user lines gain hover tooltips: the info?item_id and user?user_id / user?user links get a title attribute repeating their link text, and the plain-text cases are wrapped in <span title="..."> elements, for example:

-    % elif a['type'] == 'clip':
-    ${a['title']}
+    % elif a['media_type'] == 'clip':
+    <span title="${a['title']}">${a['title']}</span>

The same treatment is applied to the episode (grandparent_title - title), movie (title, year), track (parent_title), photo (parent_title, title) and fallback (title, year) branches, and to both friendly_name user links.
@@ -294,6 +294,8 @@
     $('a[data-toggle=tab][href=' + current_tab + ']').trigger('click');
 }
 
+var music_visible = (${config['music_logging_enable']} == 1 ? true : false);
+
 function loadGraphsTab1(time_range, yaxis) {
     setGraphFormat(yaxis);
 

@@ -319,6 +321,7 @@
     hc_plays_by_day_options.yAxis.min = 0;
     hc_plays_by_day_options.xAxis.categories = dateArray;
     hc_plays_by_day_options.series = data.series;
+    hc_plays_by_day_options.series[2].visible = music_visible;
     var hc_plays_by_day = new Highcharts.Chart(hc_plays_by_day_options);
     }
 });

The hunks at @@ -331,6 @@, @@ -343,6 @@, @@ -355,6 @@, @@ -367,6 @@ and @@ -462,6 @@ make the same one-line addition inside the AJAX success callbacks for the plays-by-day-of-week, plays-by-hour-of-day, plays-by-platform, plays-by-user and plays-by-month charts, setting series[2].visible = music_visible on each chart's options before the Highcharts.Chart is constructed, so the music series is hidden when music logging is disabled.
@@ -17,19 +17,19 @@ data[array_index]['rows'] :: Usable parameters
 
 row_id              Return the db row id for a metadata item if one exists
 
-== Only if 'stat_id' is 'top_tv' or 'popular_tv' or 'top_movies' or 'popular_movies' or 'last_watched' ==
+== Only if 'stat_id' is 'top_tv' or 'popular_tv' or 'top_movies' or 'popular_movies' or 'top_music' or 'popular_music' or 'last_watched' ==
 thumb               Return the thumb for the media item.
 
-== Only if 'stat_id' is 'top_tv' or 'popular_tv' ==
+== Only if 'stat_id' is 'top_tv' or 'popular_tv' or 'top_music' or 'popular_music' ==
 grandparent_thumb   Returns location of the item's thumbnail. Use with pms_image_proxy.
 rating_key          Returns the unique identifier for the media item.
 title               Returns the title for the associated stat.
 
-== Only if 'stat_id' is 'top_tv' or 'top_movies' or 'top_user' or 'top_platform' ==
+== Only if 'stat_id' is 'top_tv' or 'top_movies' or 'top_music' or 'top_user' or 'top_platform' ==
 total_plays         Returns the count for the associated stat.
 total_duration      Returns the total duration for the associated stat.
 
-== Only of 'stat_id' is 'popular_tv' or 'popular_movies' ==
+== Only of 'stat_id' is 'popular_tv' or 'popular_movies' or 'popular_music' ==
 users_watched       Returns the count for the associated stat.
 
 == Only if 'stat_id' is 'top_user' or 'last_watched' ==
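For orientation, a rough sketch of how a caller might read the per-stat fields listed above, with the new music stat ids included (the helper below is illustrative only; the real Mako template branches on stat_id inline):

    def stat_headline(top_stat):
        """Return (title, metric) for one home-stats card, per the field list above."""
        row = top_stat['rows'][0]
        if top_stat['stat_id'] in ('popular_tv', 'popular_movies', 'popular_music'):
            metric = row['users_watched']      # popularity cards count users
        elif top_stat['stat_type'] == 'total_plays':
            metric = row['total_plays']        # top_* cards count plays ...
        else:
            metric = row['total_duration']     # ... or total duration
        return row['title'], metric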
@@ -77,7 +77,7 @@ DOCUMENTATION :: END
(together with the parallel hunks at @@ -89,7 @@, @@ -111,7 @@, @@ -125,7 @@, @@ -157,7 @@, @@ -165,7 @@, @@ -187,7 @@, @@ -197,7 @@, @@ -229,7 @@, @@ -241,7 @@, @@ -263,7 @@, @@ -277,7 @@, @@ -309,7 @@, @@ -317,7 @@, @@ -339,7 @@ and @@ -349,7 @@)

Each of these hunks changes a single line in the TV, movie and popularity stat cards, adding a hover tooltip to the link that wraps the card's title or poster:

-    <a href="info?item_id=${top_stat['rows'][0]['rating_key']}">
+    <a href="info?item_id=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">

The hunks inside the expandable lists make the same change with top_stat['rows'][loop.index] in place of top_stat['rows'][0]; the surrounding markup (playcount and users headings, poster faces and pms_image_proxy URLs) is unchanged.
@@ -372,6 +372,158 @@ DOCUMENTATION :: END

This hunk closes the preceding card (% endif / </li> / </div> context) and inserts two new music stat cards, modelled on the existing top_tv and popular_tv blocks, before the existing top_users card (% elif top_stat['stat_id'] == 'top_users' and top_stat['rows']:):

* % elif top_stat['stat_id'] == 'top_music' and top_stat['rows']: renders a "Most Listened to Artist" card. The headline artist is a tooltip-titled link to info?item_id=${top_stat['rows'][0]['rating_key']}, showing ${top_stat['rows'][0]['total_plays']} plays when stat_type is total_plays and ${top_stat['rows'][0]['total_duration'] | hd} otherwise. Artwork uses grandparent_thumb through pms_image_proxy at 300x300, falling back to interfaces/default/images/poster.png. When len(top_stat['rows']) > 1, a chevron reveals a slider list of the remaining artists, each with its own titled link, play count or duration, 300x300 thumb (or the poster fallback) and rank number ${loop.index + 1}.
* % elif top_stat['stat_id'] == 'popular_music' and top_stat['rows']: renders a "Most Popular Artist" card with the same layout, but the metric shown for the headline artist and for each list entry is ${top_stat['rows'][0]['users_watched']} (or the loop.index row's users_watched) users.
@@ -382,9 +534,9 @@ DOCUMENTATION :: END
(together with the parallel hunks at @@ -398,9 @@, @@ -424,9 @@, @@ -442,9 @@, @@ -476,7 @@, @@ -485,7 @@, @@ -500,7 @@, @@ -513,7 @@, @@ -539,15 @@, @@ -561,7 @@, @@ -583,7 @@, @@ -591,9 @@ and @@ -607,7 @@)

These hunks add the same hover tooltips to the remaining home stats cards, leaving the surrounding markup unchanged:

* Most Active User: the headline user's <a href="user?user_id=${top_stat['rows'][0]['user_id']}"> and <a href="user?user=${top_stat['rows'][0]['user']}"> links, and the corresponding links for each list entry, gain a title attribute with the user's friendly_name.
* Most Active Platform: the platform name headings become <h4 title="${top_stat['rows'][0]['platform_type']}"> and <h5 title="${top_stat['rows'][loop.index]['platform_type']}">, and the icon containers (#platform-stat and #home-platforms-instance-poster-${loop.index + 1}) gain the same title attribute.
* Last Watched: the media links to info?source=history&item_id=${top_stat['rows'][0]['row_id']} (and the loop.index variants) gain a title attribute with the item's title, and the associated user links gain a title attribute with the friendly_name.
BIN  data/interfaces/default/images/plex-logo-light-small.png (new file; binary, 1.4 KiB, not shown)
@@ -16,6 +16,7 @@
     </div>
     </div>
 </div>
+% if config['home_stats_cards'] > 'watch_statistics':
 <div class="row">
     <div class="col-md-12">
         <div class="padded-header">

@@ -27,6 +28,8 @@
     </div>
     </div>
 </div>
+% endif
+% if config['home_library_cards'] > 'library_statistics':
 <div class="row">
     <div class="col-md-12">
         <div class="padded-header" id="library-statistics-header">

@@ -38,6 +41,7 @@
     </div>
     </div>
 </div>
+% endif
 <div class='row'>
     <div class="col-md-12">
         <div class="padded-header">

@@ -82,12 +86,12 @@
     currentActivity();
     setInterval(currentActivity, 15000);
 
-    function getHomeStats(days, stat_type, stat_count) {
+    function getHomeStats(days) {
         $.ajax({
             url: 'home_stats',
             cache: false,
             async: true,
-            data: {time_range: days, stat_type: stat_type, stat_count: stat_count},
+            data: { },
             complete: function(xhr, status) {
                 $("#home-stats").html(xhr.responseText);
             }

@@ -165,7 +169,7 @@
     }
 });
 
-getHomeStats(${config['home_stats_length']}, ${config['home_stats_type']}, ${config['home_stats_count']});
+getHomeStats();
 getLibraryStatsHeader();
 getLibraryStats();
 
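With the stat type, count and time range no longer sent by the page, the home_stats endpoint has to fall back to the saved settings itself. A rough sketch of that server-side idea (the function and config keys below are illustrative stand-ins, not the actual PlexPy webserve code):

    def home_stats_defaults(params, config):
        """Merge request parameters with saved settings, preferring the saved
        settings when the page sends nothing (as home.html now does)."""
        return {
            'time_range': params.get('time_range', config['home_stats_length']),
            'stat_type': params.get('stat_type', config['home_stats_type']),
            'stat_count': params.get('stat_count', config['home_stats_count']),
        }

    # e.g. an empty AJAX payload falls back entirely to the saved settings
    print(home_stats_defaults({}, {'home_stats_length': 30,
                                   'home_stats_type': 0,
                                   'home_stats_count': 5}))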
@@ -63,14 +63,14 @@ DOCUMENTATION :: END
     </div>
     </div>
     <div class="item-children-instance-text-wrapper episode-item">
-        <h3>${child['title']}</h3>
+        <h3 title="${child['title']}">${child['title']}</h3>
     </div>
     % elif data['children_type'] == 'album':
     <div class="item-children-poster">
         <div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300);"></div>
     </div>
     <div class="item-children-instance-text-wrapper album-item">
-        <h3>${child['title']}</h3>
+        <h3 title="${child['title']}">${child['title']}</h3>
     </div>
     % elif data['children_type'] == 'track':
     % if loop.index % 2 == 0:
@ -103,11 +103,11 @@ history_table_options = {
 "createdCell": function (td, cellData, rowData, row, col) {
 if (cellData !== '') {
 var transcode_dec = '';
-if (rowData['video_decision'] === 'transcode') {
+if (rowData['video_decision'] === 'transcode' || rowData['audio_decision'] === 'transcode') {
 transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
-} else if (rowData['video_decision'] === 'copy') {
+} else if (rowData['video_decision'] === 'copy' || rowData['audio_decision'] === 'copy') {
 transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
-} else if (rowData['video_decision'] === 'direct play' || rowData['video_decision'] === '') {
+} else if (rowData['video_decision'] === 'direct play' || rowData['audio_decision'] === 'direct play') {
 transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
 }
 $(td).html('<div><a href="#" data-target="#info-modal" data-toggle="modal"><div style="float: left;">' + transcode_dec + ' ' + cellData + '</div></a></div>');
@ -253,7 +253,7 @@ history_table_options = {
 });
 },
 "preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
 showMsg(msg, false, false, 0)
 },
 "rowCallback": function (row, rowData, rowIndex) {
@ -77,11 +77,11 @@ history_table_modal_options = {
 "createdCell": function (td, cellData, rowData, row, col) {
 if (cellData !== '') {
 var transcode_dec = '';
-if (rowData['video_decision'] === 'transcode') {
+if (rowData['video_decision'] === 'transcode' || rowData['audio_decision'] === 'transcode') {
 transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Transcode"><i class="fa fa-server fa-fw"></i></span>';
-} else if (rowData['video_decision'] === 'copy') {
+} else if (rowData['video_decision'] === 'copy' || rowData['audio_decision'] === 'copy') {
 transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Stream"><i class="fa fa-video-camera fa-fw"></i></span>';
-} else if (rowData['video_decision'] === 'direct play' || rowData['video_decision'] === '') {
+} else if (rowData['video_decision'] === 'direct play' || rowData['audio_decision'] === 'direct play') {
 transcode_dec = '<span class="transcode-tooltip" data-toggle="tooltip" title="Direct Play"><i class="fa fa-play-circle fa-fw"></i></span>';
 }
 $(td).html('<div><a href="#" data-target="#info-modal" data-toggle="modal"><div style="float: left;">' + transcode_dec + ' ' + cellData + '</div></a></div>');
@ -134,7 +134,7 @@ history_table_modal_options = {
 });
 },
 "preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
 showMsg(msg, false, false, 0)
 }
 }
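Both history tables above now weigh the audio decision alongside the video decision when picking the stream icon. A small Python sketch of the same precedence rule, for illustration only (this is not code from the commit): transcode on either stream wins, a copy on either stream counts as direct stream, anything else is a direct play.

```python
# Illustration of the decision precedence applied in the table cells above.
def stream_decision(video_decision, audio_decision):
    decisions = (video_decision, audio_decision)
    if 'transcode' in decisions:
        return 'Transcode'
    if 'copy' in decisions:
        return 'Direct Stream'
    return 'Direct Play'

assert stream_decision('copy', 'transcode') == 'Transcode'
assert stream_decision('direct play', 'copy') == 'Direct Stream'
assert stream_decision('direct play', 'direct play') == 'Direct Play'
```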
@ -35,7 +35,7 @@ var log_table_options = {
 $('#ajaxMsg').fadeOut();
 },
 "preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
 showMsg(msg, false, false, 0)
 }
 }
@ -123,7 +123,7 @@ user_ip_table_options = {
 
 },
 "preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
 showMsg(msg, false, false, 0)
 }
 }
@ -187,7 +187,7 @@ users_list_table_options = {
 }
 },
 "preDrawCallback": function(settings) {
-var msg = "<div class='msg'><i class='fa fa-refresh fa-spin'></i> Fetching rows...</div>";
+var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
 showMsg(msg, false, false, 0)
 },
 "rowCallback": function (row, rowData) {
@ -73,6 +73,6 @@ DOCUMENTATION :: END
 % endfor
 </ul>
 % else:
-<div class="text-muted">Unable to retrieve data from database. Please check your <a href="settings">settings</a>.
+<div class="text-muted">Unable to retrieve data from server. Please check your <a href="settings">settings</a>.
 </div><br>
 % endif
@ -1,3 +1,6 @@
+<%!
+from plexpy import helpers
+%>
 % if data:
 <div class="modal-dialog" role="document">
 <div class="modal-content">
@ -9,9 +12,9 @@
 <div class="container-fluid">
 <div class="row">
 <form action="set_notification_config" method="post" class="form" id="set_notification_config" data-parsley-validate>
-<div class="col-md-6">
+<div class="col-md-8">
 % for item in data:
-% if item['input_type'] != 'checkbox':
+% if item['input_type'] == 'text' or item['input_type'] == 'number' or item['input_type'] == 'password':
 <div class="form-group">
 <label for="${item['name']}">${item['label']}</label>
 <input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
@ -20,12 +23,18 @@
 % endif
 <p class="help-block">${item['description']}</p>
 </div>
+% elif item['input_type'] == 'button':
+<div class="form-group">
+<input type="${item['input_type']}" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
+</div>
+<p class="help-block">${item['description']}</p>
 % elif item['input_type'] == 'checkbox':
 <div class="checkbox">
 <label>
-<input type="checkbox" id="${item['name']}" name="${item['name']}" value="1" ${item['value']}> ${item['label']}
+<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${helpers.checked(item['value'])}> ${item['label']}
 </label>
 <p class="help-block">${item['description']}</p>
+<input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">
 </div>
 % endif
 % endfor
@ -35,7 +44,14 @@
 </div>
 </div>
 <div class="modal-footer">
+<%
+nosave = any(d['input_type'] == 'nosave' for d in data)
+%>
+% if not nosave:
 <input type="button" id="save-notification-item" class="btn btn-bright" value="Save">
+% else:
+<input type="button" class="btn btn-bright" data-dismiss="modal" value="Close">
+% endif
 </div>
 </div>
 </div>
@ -53,4 +69,30 @@
 doAjaxCall('set_notification_config',$(this),'tabs',true);
 return false;
 });
+
+$('#twitterStep1').click(function () {
+$.get("/twitterStep1", function (data) {window.open(data); })
+.done(function () { $('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>Confirm Authorization. Check pop-up blocker if no response.</div>"); });
+$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
+});
+$('#twitterStep2').click(function () {
+var twitter_key = $("#twitter_key").val();
+$.get("/twitterStep2", {'key': twitter_key}, function (data) { $('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>"+data+"</div>"); });
+$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
+});
+$('#testTwitter').click(function () {
+$.get("/testTwitter",
+function (data) { $('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>"+data+"</div>"); });
+$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
+});
+
+// Never send checkbox values directly, always substitute value in hidden input.
+$('.checkboxes').click(function() {
+var configToggle = $(this).data('id');
+if ($(this).is(":checked")) {
+$("#"+configToggle).val(1);
+} else {
+$("#"+configToggle).val(0);
+}
+});
 </script>
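The checkbox template above now renders its checked state through helpers.checked(). A minimal sketch of what such a helper is assumed to do, not necessarily PlexPy's exact implementation:

```python
# Hypothetical helper mirroring helpers.checked() in the Mako template above:
# turn a stored config value (assumed to be a 0/1 integer) into the HTML
# attribute interpolated into the checkbox input.
def checked(variable):
    """Return 'checked' for a truthy value, otherwise an empty string."""
    if variable:
        return 'checked'
    return ''

# Example: <input type="checkbox" ... ${checked(1)}> renders a ticked box,
# while ${checked(0)} leaves it unticked.
```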
@ -44,10 +44,10 @@ DOCUMENTATION :: END
 </div>
 <div class="dashboard-recent-media-metacontainer">
 % if item['type'] == 'season':
-<h3>${item['parent_title']}</h3>
+<h3 title="${item['parent_title']}">${item['parent_title']}</h3>
 <h3 class="text-muted">${item['title']}</h3>
 % elif item['type'] == 'movie':
-<h3>${item['title']}</h3>
+<h3 title="${item['title']}">${item['title']}</h3>
 <h3 class="text-muted">${item['year']}</h3>
 % endif
 </div>
@ -66,7 +66,7 @@ DOCUMENTATION :: END
 </div>
 </div>
 <div class="dashboard-recent-media-metacontainer">
-<h3>${item['parent_title']}</h3>
+<h3 title="${item['parent_title']}">${item['parent_title']}</h3>
 <h3 class="text-muted">${item['title']}</h3>
 </div>
 </a>
@ -34,7 +34,7 @@ available_notification_agents = notifiers.available_notification_agents()
 <div class="col-md-4">
 <ul class="nav-settings list-unstyled" role="tablist">
 <li role="presentation" class="active"><a href="#tabs-1" aria-controls="tabs-1" role="tab" data-toggle="tab">General</a></li>
-<li role="presentation"><a href="#tabs-2" aria-controls="tabs-2" role="tab" data-toggle="tab">Homepage</a></li>
+<li role="presentation"><a href="#tabs-2" aria-controls="tabs-2" role="tab" data-toggle="tab">Homepage Statistics</a></li>
 <li role="presentation"><a href="#tabs-3" aria-controls="tabs-3" role="tab" data-toggle="tab">Web Interface</a></li>
 <li role="presentation"><a href="#tabs-4" aria-controls="tabs-4" role="tab" data-toggle="tab">Access Control</a></li>
 <li role="presentation"><a href="#tabs-5" aria-controls="tabs-5" role="tab" data-toggle="tab">Plex Media Server</a></li>
@ -94,9 +94,29 @@ available_notification_agents = notifiers.available_notification_agents()
 </div>
 <div role="tabpanel" class="tab-pane" id="tabs-2">
 <div class="padded-header">
-<h3>Homepage Statistics</h3>
+<h3>Watch Statistics</h3>
 </div>
 
+<div class="form-group">
+<label for="home_stats_cards">Cards</label>
+<div class="row">
+<div class="col-md-6">
+<select multiple class="form-control" id="home_stats_cards" name="home_stats_cards" data-parsley-trigger="change">
+<option id="card-watch_statistics" value="watch_statistics" class="hidden" selected>Watch Statistics</option>
+<option id="card-top_tv" value="top_tv">Most Watched TV</option>
+<option id="card-popular_tv" value="popular_tv">Most Popular TV</option>
+<option id="card-top_movies" value="top_movies">Most Watched Movie</option>
+<option id="card-popular_movies" value="popular_movies">Most Popular Movie</option>
+<option id="card-top_music" value="top_music">Most Listened Music</option>
+<option id="card-popular_music" value="popular_music">Most Popular Music</option>
+<option id="card-top_users" value="top_users">Most Active User</option>
+<option id="card-top_platforms" value="top_platforms">Most Active Platform</option>
+<option id="card-last_watched" value="last_watched">Last Watched</option>
+</select>
+</div>
+</div>
+<p class="help-block">Select the cards to show in the watch statistics on the home page. Select none to disable.</p>
+</div>
 <div class="form-group">
 <label for="home_stats_length">Time Frame</label>
 <div class="row">
@ -105,7 +125,7 @@ available_notification_agents = notifiers.available_notification_agents()
 </div>
 <div id="home_stats_length_error" class="alert alert-danger settings-alert" role="alert"></div>
 </div>
-<p class="help-block">Specify the number of days for the statistics on the home page. Default is 30 days.</p>
+<p class="help-block">Specify the number of days for the watch statistics on the home page. Default is 30 days.</p>
 </div>
 <div class="form-group">
 <label for="home_stats_count">Top Lists</label>
@ -115,7 +135,7 @@ available_notification_agents = notifiers.available_notification_agents()
 </div>
 <div id="home_stats_count_error" class="alert alert-danger settings-alert" role="alert"></div>
 </div>
-<p class="help-block">Specify the number of items to show in the top lists for the statistics on the home page. Max is 10 items, default is 5 items, 0 to disable.</p>
+<p class="help-block">Specify the number of items to show in the top lists for the watch statistics on the home page. Max is 10 items, default is 5 items, 0 to disable.</p>
 </div>
 <div class="checkbox">
 <label>
@ -123,7 +143,24 @@ available_notification_agents = notifiers.available_notification_agents()
 </label>
 <p class="help-block">Use play duration instead of play count to generate statistics.</p>
 </div>
+
+<div class="padded-header">
+<h3>Library Statistics</h3>
+</div>
+
+<div class="form-group">
+<label for="home_library_cards">Cards</label>
+<div class="row">
+<div class="col-md-6">
+<select multiple class="form-control" id="home_library_cards" name="home_library_cards" data-parsley-trigger="change">
+<option id="card-library_statistics" value="library_statistics" class="hidden" selected>Library Statistics</option>
+</select>
+</div>
+</div>
+<p class="help-block">Select the cards to show in the library statistics on the home page. Select none to disable.</p>
+</div>
 <p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
 
 </div>
 <div role="tabpanel" class="tab-pane" id="tabs-3">
 <div class="padded-header">
@ -231,6 +268,7 @@ available_notification_agents = notifiers.available_notification_agents()
 <div class="padded-header">
 <h3>Plex Media Server</h3>
 </div>
+<p class="help-block">If you're using websocket monitoring, any server changes require a restart of PlexPy.</p>
 <div class="form-group has-feedback" id="pms-ip-group">
 <label for="pms_ip">Plex IP or Hostname</label>
 <div class="row">
@ -359,6 +397,12 @@ available_notification_agents = notifiers.available_notification_agents()
 </div>
 <p class="help-block">The interval (in seconds) PlexPy will ping your Plex Server. Min 30 seconds, recommended 60 seconds.</p>
 </div>
+<div class="checkbox">
+<label>
+<input type="checkbox" id="monitoring_use_websocket" name="monitoring_use_websocket" value="1" ${config['monitoring_use_websocket']}> Use Websocket (requires restart)
+</label>
+<p class="help-block">Instead of polling the server at regular intervals let the server tell us when something happens. This is currently experimental. Encrypted websocket is not currently supported.</p>
+</div>
 
 <div class="padded-header">
 <h3>History Logging</h3>
@ -377,7 +421,7 @@ available_notification_agents = notifiers.available_notification_agents()
 </div>
 <div id="logging_ignore_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
 </div>
-<p class="help-block">The interval (in seconds) PlexPy will wait for a video item to be active before logging it. 0 to disable.</p>
+<p class="help-block">The interval (in seconds) an item must be in a playing state before logging it. 0 to disable.</p>
 </div>
 <div class="checkbox">
 <label>
@ -1188,6 +1232,47 @@ $(document).ready(function() {
 }
 
 var accordion = new Accordion($('#accordion'), false);
+
+var cards = "${config['home_stats_cards']}".split(/[\s,]+/);
+cards.forEach(function (item) {
+$('#card-'+item).prop('selected', !$(this).prop('selected'));
+});
+$('#home_stats_cards').on('mousedown', function(e) {
+e.preventDefault();
+var scroll = this.scrollTop;
+e.target.selected = !e.target.selected;
+this.scrollTop = scroll;
+}).on('mousemove', function(e) {
+e.preventDefault()
+});
+
+$.ajax({
+url: 'get_server_children',
+data: { },
+async: true,
+complete: function (xhr, status) {
+server_children_info = $.parseJSON(xhr.responseText);
+libraries_list = server_children_info.libraries_list;
+for (var i in libraries_list) {
+title = libraries_list[i].title;
+key = libraries_list[i].key;
+$('#home_library_cards').append('<option id="card-' + key + '" value="' + key + '">' + title + '</option>')
+}
+var cards = "${config['home_library_cards']}".split(/[\s,]+/);
+cards.forEach(function (item) {
+$('#card-'+item).prop('selected', !$(this).prop('selected'));
+});
+}
+});
+$('#home_library_cards').on('mousedown', function(e) {
+e.preventDefault();
+var scroll = this.scrollTop;
+e.target.selected = !e.target.selected;
+this.scrollTop = scroll;
+}).on('mousemove', function(e) {
+e.preventDefault()
+});
+
 });
 </script>
 </%def>
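The new "Use Websocket" option above replaces interval polling with server push. A rough Python sketch of that idea using the websocket-client library vendored later in this commit; the Plex notification endpoint path and port below are assumptions, and this is not PlexPy's actual monitoring code.

```python
# A minimal sketch of websocket-based monitoring: open a plain (unencrypted)
# connection and let the server push events instead of polling it.
import json
import websocket  # the lib/websocket package added by this commit


def listen(host='127.0.0.1', port=32400):
    # Assumed Plex notification endpoint; adjust to the real server address.
    ws = websocket.create_connection('ws://%s:%d/:/websockets/notifications' % (host, port))
    try:
        while True:
            message = ws.recv()          # blocks until the server pushes an event
            if message:
                info = json.loads(message)   # assumed: the server sends JSON payloads
                print(info)                  # react to play/pause/stop events here
    finally:
        ws.close()
```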
@ -48,11 +48,11 @@ DOCUMENTATION :: END
 </div>
 <div class="dashboard-recent-media-metacontainer">
 % if item['type'] == 'episode':
-<h3>${item['grandparent_title']}</h3>
-<h3>${item['title']}</h3>
+<h3 title="${item['grandparent_title']}">${item['grandparent_title']}</h3>
+<h3 title="${item['title']}">${item['title']}</h3>
 <h3 class="text-muted">S${item['parent_index']} · E${item['index']}</h3>
 % elif item['type'] == 'movie':
-<h3>${item['title']}</h3>
+<h3 title="${item['title']}">${item['title']}</h3>
 <h3 class="text-muted">${item['year']}</h3>
 % endif
 <div class="text-muted" id="time-${item['time']}">
@ -73,8 +73,8 @@ DOCUMENTATION :: END
 </div>
 </div>
 <div class="dashboard-recent-media-metacontainer">
-<h3>${item['grandparent_title']}</h3>
-<h3>${item['title']}</h3>
+<h3 title="${item['grandparent_title']}">${item['grandparent_title']}</h3>
+<h3 title="${item['title']}">${item['title']}</h3>
 <h3 class="text-muted">${item['parent_title']}</h3>
 </div>
 </a>
25  lib/websocket/__init__.py  Normal file
@ -0,0 +1,25 @@
+"""
+websocket - WebSocket client library for Python
+
+Copyright (C) 2010 Hiroki Ohtani(liris)
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1335 USA
+
+"""
+from ._core import *
+from ._app import WebSocketApp
+
+__version__ = "0.32.0"
|
382
lib/websocket/_abnf.py
Normal file
382
lib/websocket/_abnf.py
Normal file
|
@ -0,0 +1,382 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
import six
|
||||||
|
import array
|
||||||
|
import struct
|
||||||
|
import os
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._utils import validate_utf8
|
||||||
|
|
||||||
|
# closing frame status codes.
|
||||||
|
STATUS_NORMAL = 1000
|
||||||
|
STATUS_GOING_AWAY = 1001
|
||||||
|
STATUS_PROTOCOL_ERROR = 1002
|
||||||
|
STATUS_UNSUPPORTED_DATA_TYPE = 1003
|
||||||
|
STATUS_STATUS_NOT_AVAILABLE = 1005
|
||||||
|
STATUS_ABNORMAL_CLOSED = 1006
|
||||||
|
STATUS_INVALID_PAYLOAD = 1007
|
||||||
|
STATUS_POLICY_VIOLATION = 1008
|
||||||
|
STATUS_MESSAGE_TOO_BIG = 1009
|
||||||
|
STATUS_INVALID_EXTENSION = 1010
|
||||||
|
STATUS_UNEXPECTED_CONDITION = 1011
|
||||||
|
STATUS_TLS_HANDSHAKE_ERROR = 1015
|
||||||
|
|
||||||
|
VALID_CLOSE_STATUS = (
|
||||||
|
STATUS_NORMAL,
|
||||||
|
STATUS_GOING_AWAY,
|
||||||
|
STATUS_PROTOCOL_ERROR,
|
||||||
|
STATUS_UNSUPPORTED_DATA_TYPE,
|
||||||
|
STATUS_INVALID_PAYLOAD,
|
||||||
|
STATUS_POLICY_VIOLATION,
|
||||||
|
STATUS_MESSAGE_TOO_BIG,
|
||||||
|
STATUS_INVALID_EXTENSION,
|
||||||
|
STATUS_UNEXPECTED_CONDITION,
|
||||||
|
)
|
||||||
|
|
||||||
|
class ABNF(object):
|
||||||
|
"""
|
||||||
|
ABNF frame class.
|
||||||
|
see http://tools.ietf.org/html/rfc5234
|
||||||
|
and http://tools.ietf.org/html/rfc6455#section-5.2
|
||||||
|
"""
|
||||||
|
|
||||||
|
# operation code values.
|
||||||
|
OPCODE_CONT = 0x0
|
||||||
|
OPCODE_TEXT = 0x1
|
||||||
|
OPCODE_BINARY = 0x2
|
||||||
|
OPCODE_CLOSE = 0x8
|
||||||
|
OPCODE_PING = 0x9
|
||||||
|
OPCODE_PONG = 0xa
|
||||||
|
|
||||||
|
# available operation code value tuple
|
||||||
|
OPCODES = (OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE,
|
||||||
|
OPCODE_PING, OPCODE_PONG)
|
||||||
|
|
||||||
|
# opcode human readable string
|
||||||
|
OPCODE_MAP = {
|
||||||
|
OPCODE_CONT: "cont",
|
||||||
|
OPCODE_TEXT: "text",
|
||||||
|
OPCODE_BINARY: "binary",
|
||||||
|
OPCODE_CLOSE: "close",
|
||||||
|
OPCODE_PING: "ping",
|
||||||
|
OPCODE_PONG: "pong"
|
||||||
|
}
|
||||||
|
|
||||||
|
# data length threashold.
|
||||||
|
LENGTH_7 = 0x7e
|
||||||
|
LENGTH_16 = 1 << 16
|
||||||
|
LENGTH_63 = 1 << 63
|
||||||
|
|
||||||
|
def __init__(self, fin=0, rsv1=0, rsv2=0, rsv3=0,
|
||||||
|
opcode=OPCODE_TEXT, mask=1, data=""):
|
||||||
|
"""
|
||||||
|
Constructor for ABNF.
|
||||||
|
please check RFC for arguments.
|
||||||
|
"""
|
||||||
|
self.fin = fin
|
||||||
|
self.rsv1 = rsv1
|
||||||
|
self.rsv2 = rsv2
|
||||||
|
self.rsv3 = rsv3
|
||||||
|
self.opcode = opcode
|
||||||
|
self.mask = mask
|
||||||
|
if data == None:
|
||||||
|
data = ""
|
||||||
|
self.data = data
|
||||||
|
self.get_mask_key = os.urandom
|
||||||
|
|
||||||
|
def validate(self, skip_utf8_validation=False):
|
||||||
|
"""
|
||||||
|
validate the ABNF frame.
|
||||||
|
skip_utf8_validation: skip utf8 validation.
|
||||||
|
"""
|
||||||
|
if self.rsv1 or self.rsv2 or self.rsv3:
|
||||||
|
raise WebSocketProtocolException("rsv is not implemented, yet")
|
||||||
|
|
||||||
|
if self.opcode not in ABNF.OPCODES:
|
||||||
|
raise WebSocketProtocolException("Invalid opcode %r", self.opcode)
|
||||||
|
|
||||||
|
if self.opcode == ABNF.OPCODE_PING and not self.fin:
|
||||||
|
raise WebSocketProtocolException("Invalid ping frame.")
|
||||||
|
|
||||||
|
if self.opcode == ABNF.OPCODE_CLOSE:
|
||||||
|
l = len(self.data)
|
||||||
|
if not l:
|
||||||
|
return
|
||||||
|
if l == 1 or l >= 126:
|
||||||
|
raise WebSocketProtocolException("Invalid close frame.")
|
||||||
|
if l > 2 and not skip_utf8_validation and not validate_utf8(self.data[2:]):
|
||||||
|
raise WebSocketProtocolException("Invalid close frame.")
|
||||||
|
|
||||||
|
code = 256*six.byte2int(self.data[0:1]) + six.byte2int(self.data[1:2])
|
||||||
|
if not self._is_valid_close_status(code):
|
||||||
|
raise WebSocketProtocolException("Invalid close opcode.")
|
||||||
|
|
||||||
|
def _is_valid_close_status(self, code):
|
||||||
|
return code in VALID_CLOSE_STATUS or (3000 <= code <5000)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "fin=" + str(self.fin) \
|
||||||
|
+ " opcode=" + str(self.opcode) \
|
||||||
|
+ " data=" + str(self.data)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create_frame(data, opcode, fin=1):
|
||||||
|
"""
|
||||||
|
create frame to send text, binary and other data.
|
||||||
|
|
||||||
|
data: data to send. This is string value(byte array).
|
||||||
|
if opcode is OPCODE_TEXT and this value is uniocde,
|
||||||
|
data value is conveted into unicode string, automatically.
|
||||||
|
|
||||||
|
opcode: operation code. please see OPCODE_XXX.
|
||||||
|
|
||||||
|
fin: fin flag. if set to 0, create continue fragmentation.
|
||||||
|
"""
|
||||||
|
if opcode == ABNF.OPCODE_TEXT and isinstance(data, six.text_type):
|
||||||
|
data = data.encode("utf-8")
|
||||||
|
# mask must be set if send data from client
|
||||||
|
return ABNF(fin, 0, 0, 0, opcode, 1, data)
|
||||||
|
|
||||||
|
def format(self):
|
||||||
|
"""
|
||||||
|
format this object to string(byte array) to send data to server.
|
||||||
|
"""
|
||||||
|
if any(x not in (0, 1) for x in [self.fin, self.rsv1, self.rsv2, self.rsv3]):
|
||||||
|
raise ValueError("not 0 or 1")
|
||||||
|
if self.opcode not in ABNF.OPCODES:
|
||||||
|
raise ValueError("Invalid OPCODE")
|
||||||
|
length = len(self.data)
|
||||||
|
if length >= ABNF.LENGTH_63:
|
||||||
|
raise ValueError("data is too long")
|
||||||
|
|
||||||
|
frame_header = chr(self.fin << 7
|
||||||
|
| self.rsv1 << 6 | self.rsv2 << 5 | self.rsv3 << 4
|
||||||
|
| self.opcode)
|
||||||
|
if length < ABNF.LENGTH_7:
|
||||||
|
frame_header += chr(self.mask << 7 | length)
|
||||||
|
frame_header = six.b(frame_header)
|
||||||
|
elif length < ABNF.LENGTH_16:
|
||||||
|
frame_header += chr(self.mask << 7 | 0x7e)
|
||||||
|
frame_header = six.b(frame_header)
|
||||||
|
frame_header += struct.pack("!H", length)
|
||||||
|
else:
|
||||||
|
frame_header += chr(self.mask << 7 | 0x7f)
|
||||||
|
frame_header = six.b(frame_header)
|
||||||
|
frame_header += struct.pack("!Q", length)
|
||||||
|
|
||||||
|
if not self.mask:
|
||||||
|
return frame_header + self.data
|
||||||
|
else:
|
||||||
|
mask_key = self.get_mask_key(4)
|
||||||
|
return frame_header + self._get_masked(mask_key)
|
||||||
|
|
||||||
|
def _get_masked(self, mask_key):
|
||||||
|
s = ABNF.mask(mask_key, self.data)
|
||||||
|
|
||||||
|
if isinstance(mask_key, six.text_type):
|
||||||
|
mask_key = mask_key.encode('utf-8')
|
||||||
|
|
||||||
|
return mask_key + s
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def mask(mask_key, data):
|
||||||
|
"""
|
||||||
|
mask or unmask data. Just do xor for each byte
|
||||||
|
|
||||||
|
mask_key: 4 byte string(byte).
|
||||||
|
|
||||||
|
data: data to mask/unmask.
|
||||||
|
"""
|
||||||
|
if data == None:
|
||||||
|
data = ""
|
||||||
|
if isinstance(mask_key, six.text_type):
|
||||||
|
mask_key = six.b(mask_key)
|
||||||
|
|
||||||
|
if isinstance(data, six.text_type):
|
||||||
|
data = six.b(data)
|
||||||
|
|
||||||
|
_m = array.array("B", mask_key)
|
||||||
|
_d = array.array("B", data)
|
||||||
|
for i in range(len(_d)):
|
||||||
|
_d[i] ^= _m[i % 4]
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
return _d.tobytes()
|
||||||
|
else:
|
||||||
|
return _d.tostring()
|
||||||
|
|
||||||
|
|
||||||
|
class frame_buffer(object):
|
||||||
|
_HEADER_MASK_INDEX = 5
|
||||||
|
_HEADER_LENGHT_INDEX = 6
|
||||||
|
|
||||||
|
def __init__(self, recv_fn, skip_utf8_validation):
|
||||||
|
self.recv = recv_fn
|
||||||
|
self.skip_utf8_validation = skip_utf8_validation
|
||||||
|
# Buffers over the packets from the layer beneath until desired amount
|
||||||
|
# bytes of bytes are received.
|
||||||
|
self.recv_buffer = []
|
||||||
|
self.clear()
|
||||||
|
|
||||||
|
def clear(self):
|
||||||
|
self.header = None
|
||||||
|
self.length = None
|
||||||
|
self.mask = None
|
||||||
|
|
||||||
|
def has_received_header(self):
|
||||||
|
return self.header is None
|
||||||
|
|
||||||
|
def recv_header(self):
|
||||||
|
header = self.recv_strict(2)
|
||||||
|
b1 = header[0]
|
||||||
|
|
||||||
|
if six.PY2:
|
||||||
|
b1 = ord(b1)
|
||||||
|
|
||||||
|
fin = b1 >> 7 & 1
|
||||||
|
rsv1 = b1 >> 6 & 1
|
||||||
|
rsv2 = b1 >> 5 & 1
|
||||||
|
rsv3 = b1 >> 4 & 1
|
||||||
|
opcode = b1 & 0xf
|
||||||
|
b2 = header[1]
|
||||||
|
|
||||||
|
if six.PY2:
|
||||||
|
b2 = ord(b2)
|
||||||
|
|
||||||
|
has_mask = b2 >> 7 & 1
|
||||||
|
length_bits = b2 & 0x7f
|
||||||
|
|
||||||
|
self.header = (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits)
|
||||||
|
|
||||||
|
def has_mask(self):
|
||||||
|
if not self.header:
|
||||||
|
return False
|
||||||
|
return self.header[frame_buffer._HEADER_MASK_INDEX]
|
||||||
|
|
||||||
|
|
||||||
|
def has_received_length(self):
|
||||||
|
return self.length is None
|
||||||
|
|
||||||
|
def recv_length(self):
|
||||||
|
bits = self.header[frame_buffer._HEADER_LENGHT_INDEX]
|
||||||
|
length_bits = bits & 0x7f
|
||||||
|
if length_bits == 0x7e:
|
||||||
|
v = self.recv_strict(2)
|
||||||
|
self.length = struct.unpack("!H", v)[0]
|
||||||
|
elif length_bits == 0x7f:
|
||||||
|
v = self.recv_strict(8)
|
||||||
|
self.length = struct.unpack("!Q", v)[0]
|
||||||
|
else:
|
||||||
|
self.length = length_bits
|
||||||
|
|
||||||
|
def has_received_mask(self):
|
||||||
|
return self.mask is None
|
||||||
|
|
||||||
|
def recv_mask(self):
|
||||||
|
self.mask = self.recv_strict(4) if self.has_mask() else ""
|
||||||
|
|
||||||
|
def recv_frame(self):
|
||||||
|
# Header
|
||||||
|
if self.has_received_header():
|
||||||
|
self.recv_header()
|
||||||
|
(fin, rsv1, rsv2, rsv3, opcode, has_mask, _) = self.header
|
||||||
|
|
||||||
|
# Frame length
|
||||||
|
if self.has_received_length():
|
||||||
|
self.recv_length()
|
||||||
|
length = self.length
|
||||||
|
|
||||||
|
# Mask
|
||||||
|
if self.has_received_mask():
|
||||||
|
self.recv_mask()
|
||||||
|
mask = self.mask
|
||||||
|
|
||||||
|
# Payload
|
||||||
|
payload = self.recv_strict(length)
|
||||||
|
if has_mask:
|
||||||
|
payload = ABNF.mask(mask, payload)
|
||||||
|
|
||||||
|
# Reset for next frame
|
||||||
|
self.clear()
|
||||||
|
|
||||||
|
frame = ABNF(fin, rsv1, rsv2, rsv3, opcode, has_mask, payload)
|
||||||
|
frame.validate(self.skip_utf8_validation)
|
||||||
|
|
||||||
|
return frame
|
||||||
|
|
||||||
|
def recv_strict(self, bufsize):
|
||||||
|
shortage = bufsize - sum(len(x) for x in self.recv_buffer)
|
||||||
|
while shortage > 0:
|
||||||
|
# Limit buffer size that we pass to socket.recv() to avoid
|
||||||
|
# fragmenting the heap -- the number of bytes recv() actually
|
||||||
|
# reads is limited by socket buffer and is relatively small,
|
||||||
|
# yet passing large numbers repeatedly causes lots of large
|
||||||
|
# buffers allocated and then shrunk, which results in fragmentation.
|
||||||
|
bytes = self.recv(min(16384, shortage))
|
||||||
|
self.recv_buffer.append(bytes)
|
||||||
|
shortage -= len(bytes)
|
||||||
|
|
||||||
|
unified = six.b("").join(self.recv_buffer)
|
||||||
|
|
||||||
|
if shortage == 0:
|
||||||
|
self.recv_buffer = []
|
||||||
|
return unified
|
||||||
|
else:
|
||||||
|
self.recv_buffer = [unified[bufsize:]]
|
||||||
|
return unified[:bufsize]
|
||||||
|
|
||||||
|
|
||||||
|
class continuous_frame(object):
|
||||||
|
def __init__(self, fire_cont_frame, skip_utf8_validation):
|
||||||
|
self.fire_cont_frame = fire_cont_frame
|
||||||
|
self.skip_utf8_validation = skip_utf8_validation
|
||||||
|
self.cont_data = None
|
||||||
|
self.recving_frames = None
|
||||||
|
|
||||||
|
def validate(self, frame):
|
||||||
|
if not self.recving_frames and frame.opcode == ABNF.OPCODE_CONT:
|
||||||
|
raise WebSocketProtocolException("Illegal frame")
|
||||||
|
if self.recving_frames and frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
|
||||||
|
raise WebSocketProtocolException("Illegal frame")
|
||||||
|
|
||||||
|
def add(self, frame):
|
||||||
|
if self.cont_data:
|
||||||
|
self.cont_data[1] += frame.data
|
||||||
|
else:
|
||||||
|
if frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY):
|
||||||
|
self.recving_frames = frame.opcode
|
||||||
|
self.cont_data = [frame.opcode, frame.data]
|
||||||
|
|
||||||
|
if frame.fin:
|
||||||
|
self.recving_frames = None
|
||||||
|
|
||||||
|
def is_fire(self, frame):
|
||||||
|
return frame.fin or self.fire_cont_frame
|
||||||
|
|
||||||
|
def extract(self, frame):
|
||||||
|
data = self.cont_data
|
||||||
|
self.cont_data = None
|
||||||
|
frame.data = data[1]
|
||||||
|
if not self.fire_cont_frame and data[0] == ABNF.OPCODE_TEXT and not self.skip_utf8_validation and not validate_utf8(frame.data):
|
||||||
|
raise WebSocketPayloadException("cannot decode: " + repr(frame.data))
|
||||||
|
|
||||||
|
return [data[0], frame]
|
236  lib/websocket/_app.py  Normal file
@ -0,0 +1,236 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
"""
|
||||||
|
WebSocketApp provides higher level APIs.
|
||||||
|
"""
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
import sys
|
||||||
|
import select
|
||||||
|
import six
|
||||||
|
|
||||||
|
from ._core import WebSocket, getdefaulttimeout
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._logging import *
|
||||||
|
from websocket._abnf import ABNF
|
||||||
|
|
||||||
|
__all__ = ["WebSocketApp"]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketApp(object):
|
||||||
|
"""
|
||||||
|
Higher level of APIs are provided.
|
||||||
|
The interface is like JavaScript WebSocket object.
|
||||||
|
"""
|
||||||
|
def __init__(self, url, header=[],
|
||||||
|
on_open=None, on_message=None, on_error=None,
|
||||||
|
on_close=None, on_ping=None, on_pong=None,
|
||||||
|
on_cont_message=None,
|
||||||
|
keep_running=True, get_mask_key=None, cookie=None,
|
||||||
|
subprotocols=None):
|
||||||
|
"""
|
||||||
|
url: websocket url.
|
||||||
|
header: custom header for websocket handshake.
|
||||||
|
on_open: callable object which is called at opening websocket.
|
||||||
|
this function has one argument. The arugment is this class object.
|
||||||
|
on_message: callbale object which is called when recieved data.
|
||||||
|
on_message has 2 arguments.
|
||||||
|
The 1st arugment is this class object.
|
||||||
|
The passing 2nd arugment is utf-8 string which we get from the server.
|
||||||
|
on_error: callable object which is called when we get error.
|
||||||
|
on_error has 2 arguments.
|
||||||
|
The 1st arugment is this class object.
|
||||||
|
The passing 2nd arugment is exception object.
|
||||||
|
on_close: callable object which is called when closed the connection.
|
||||||
|
this function has one argument. The arugment is this class object.
|
||||||
|
on_cont_message: callback object which is called when recieve continued
|
||||||
|
frame data.
|
||||||
|
on_message has 3 arguments.
|
||||||
|
The 1st arugment is this class object.
|
||||||
|
The passing 2nd arugment is utf-8 string which we get from the server.
|
||||||
|
The 3rd arugment is continue flag. if 0, the data continue
|
||||||
|
to next frame data
|
||||||
|
keep_running: a boolean flag indicating whether the app's main loop
|
||||||
|
should keep running, defaults to True
|
||||||
|
get_mask_key: a callable to produce new mask keys,
|
||||||
|
see the WebSocket.set_mask_key's docstring for more information
|
||||||
|
subprotocols: array of available sub protocols. default is None.
|
||||||
|
"""
|
||||||
|
self.url = url
|
||||||
|
self.header = header
|
||||||
|
self.cookie = cookie
|
||||||
|
self.on_open = on_open
|
||||||
|
self.on_message = on_message
|
||||||
|
self.on_error = on_error
|
||||||
|
self.on_close = on_close
|
||||||
|
self.on_ping = on_ping
|
||||||
|
self.on_pong = on_pong
|
||||||
|
self.on_cont_message = on_cont_message
|
||||||
|
self.keep_running = keep_running
|
||||||
|
self.get_mask_key = get_mask_key
|
||||||
|
self.sock = None
|
||||||
|
self.last_ping_tm = 0
|
||||||
|
self.subprotocols = subprotocols
|
||||||
|
|
||||||
|
def send(self, data, opcode=ABNF.OPCODE_TEXT):
|
||||||
|
"""
|
||||||
|
send message.
|
||||||
|
data: message to send. If you set opcode to OPCODE_TEXT,
|
||||||
|
data must be utf-8 string or unicode.
|
||||||
|
opcode: operation code of data. default is OPCODE_TEXT.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not self.sock or self.sock.send(data, opcode) == 0:
|
||||||
|
raise WebSocketConnectionClosedException("Connection is already closed.")
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""
|
||||||
|
close websocket connection.
|
||||||
|
"""
|
||||||
|
self.keep_running = False
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
|
||||||
|
def _send_ping(self, interval, event):
|
||||||
|
while not event.wait(interval):
|
||||||
|
self.last_ping_tm = time.time()
|
||||||
|
if self.sock:
|
||||||
|
self.sock.ping()
|
||||||
|
|
||||||
|
def run_forever(self, sockopt=None, sslopt=None,
|
||||||
|
ping_interval=0, ping_timeout=None,
|
||||||
|
http_proxy_host=None, http_proxy_port=None,
|
||||||
|
http_no_proxy=None, http_proxy_auth=None,
|
||||||
|
skip_utf8_validation=False,
|
||||||
|
host=None, origin=None):
|
||||||
|
"""
|
||||||
|
run event loop for WebSocket framework.
|
||||||
|
This loop is infinite loop and is alive during websocket is available.
|
||||||
|
sockopt: values for socket.setsockopt.
|
||||||
|
sockopt must be tuple
|
||||||
|
and each element is argument of sock.setscokopt.
|
||||||
|
sslopt: ssl socket optional dict.
|
||||||
|
ping_interval: automatically send "ping" command
|
||||||
|
every specified period(second)
|
||||||
|
if set to 0, not send automatically.
|
||||||
|
ping_timeout: timeout(second) if the pong message is not recieved.
|
||||||
|
http_proxy_host: http proxy host name.
|
||||||
|
http_proxy_port: http proxy port. If not set, set to 80.
|
||||||
|
http_no_proxy: host names, which doesn't use proxy.
|
||||||
|
skip_utf8_validation: skip utf8 validation.
|
||||||
|
host: update host header.
|
||||||
|
origin: update origin header.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not ping_timeout or ping_timeout <= 0:
|
||||||
|
ping_timeout = None
|
||||||
|
if sockopt is None:
|
||||||
|
sockopt = []
|
||||||
|
if sslopt is None:
|
||||||
|
sslopt = {}
|
||||||
|
if self.sock:
|
||||||
|
raise WebSocketException("socket is already opened")
|
||||||
|
thread = None
|
||||||
|
close_frame = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.sock = WebSocket(self.get_mask_key,
|
||||||
|
sockopt=sockopt, sslopt=sslopt,
|
||||||
|
fire_cont_frame=self.on_cont_message and True or False,
|
||||||
|
skip_utf8_validation=skip_utf8_validation)
|
||||||
|
self.sock.settimeout(getdefaulttimeout())
|
||||||
|
self.sock.connect(self.url, header=self.header, cookie=self.cookie,
|
||||||
|
http_proxy_host=http_proxy_host,
|
||||||
|
http_proxy_port=http_proxy_port,
|
||||||
|
http_no_proxy=http_no_proxy, http_proxy_auth=http_proxy_auth,
|
||||||
|
subprotocols=self.subprotocols,
|
||||||
|
host=host, origin=origin)
|
||||||
|
self._callback(self.on_open)
|
||||||
|
|
||||||
|
if ping_interval:
|
||||||
|
event = threading.Event()
|
||||||
|
thread = threading.Thread(target=self._send_ping, args=(ping_interval, event))
|
||||||
|
thread.setDaemon(True)
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
while self.sock.connected:
|
||||||
|
r, w, e = select.select((self.sock.sock, ), (), (), ping_timeout)
|
||||||
|
if not self.keep_running:
|
||||||
|
break
|
||||||
|
if ping_timeout and self.last_ping_tm and time.time() - self.last_ping_tm > ping_timeout:
|
||||||
|
self.last_ping_tm = 0
|
||||||
|
raise WebSocketTimeoutException("ping timed out")
|
||||||
|
|
||||||
|
if r:
|
||||||
|
op_code, frame = self.sock.recv_data_frame(True)
|
||||||
|
if op_code == ABNF.OPCODE_CLOSE:
|
||||||
|
close_frame = frame
|
||||||
|
break
|
||||||
|
elif op_code == ABNF.OPCODE_PING:
|
||||||
|
self._callback(self.on_ping, frame.data)
|
||||||
|
elif op_code == ABNF.OPCODE_PONG:
|
||||||
|
self._callback(self.on_pong, frame.data)
|
||||||
|
elif op_code == ABNF.OPCODE_CONT and self.on_cont_message:
|
||||||
|
self._callback(self.on_cont_message, frame.data, frame.fin)
|
||||||
|
else:
|
||||||
|
data = frame.data
|
||||||
|
if six.PY3 and frame.opcode == ABNF.OPCODE_TEXT:
|
||||||
|
data = data.decode("utf-8")
|
||||||
|
self._callback(self.on_message, data)
|
||||||
|
except Exception as e:
|
||||||
|
self._callback(self.on_error, e)
|
||||||
|
finally:
|
||||||
|
if thread:
|
||||||
|
event.set()
|
||||||
|
thread.join()
|
||||||
|
self.keep_running = False
|
||||||
|
self.sock.close()
|
||||||
|
self._callback(self.on_close,
|
||||||
|
*self._get_close_args(close_frame.data if close_frame else None))
|
||||||
|
self.sock = None
|
||||||
|
|
||||||
|
def _get_close_args(self, data):
|
||||||
|
""" this functions extracts the code, reason from the close body
|
||||||
|
if they exists, and if the self.on_close except three arguments """
|
||||||
|
import inspect
|
||||||
|
# if the on_close callback is "old", just return empty list
|
||||||
|
if not self.on_close or len(inspect.getargspec(self.on_close).args) != 3:
|
||||||
|
return []
|
||||||
|
|
||||||
|
if data and len(data) >= 2:
|
||||||
|
code = 256*six.byte2int(data[0:1]) + six.byte2int(data[1:2])
|
||||||
|
reason = data[2:].decode('utf-8')
|
||||||
|
return [code, reason]
|
||||||
|
|
||||||
|
return [None, None]
|
||||||
|
|
||||||
|
def _callback(self, callback, *args):
|
||||||
|
if callback:
|
||||||
|
try:
|
||||||
|
callback(self, *args)
|
||||||
|
except Exception as e:
|
||||||
|
error(e)
|
||||||
|
if isEnabledForDebug():
|
||||||
|
_, _, tb = sys.exc_info()
|
||||||
|
traceback.print_tb(tb)
|
482  lib/websocket/_core.py  Normal file
@ -0,0 +1,482 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
|
||||||
|
import six
|
||||||
|
import socket
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
from base64 import encodebytes as base64encode
|
||||||
|
else:
|
||||||
|
from base64 import encodestring as base64encode
|
||||||
|
|
||||||
|
import struct
|
||||||
|
import threading
|
||||||
|
|
||||||
|
# websocket modules
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._abnf import *
|
||||||
|
from ._socket import *
|
||||||
|
from ._utils import *
|
||||||
|
from ._url import *
|
||||||
|
from ._logging import *
|
||||||
|
from ._http import *
|
||||||
|
from ._handshake import *
|
||||||
|
from ._ssl_compat import *
|
||||||
|
|
||||||
|
"""
|
||||||
|
websocket python client.
|
||||||
|
=========================
|
||||||
|
|
||||||
|
This version support only hybi-13.
|
||||||
|
Please see http://tools.ietf.org/html/rfc6455 for protocol.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def create_connection(url, timeout=None, **options):
|
||||||
|
"""
|
||||||
|
connect to url and return websocket object.
|
||||||
|
|
||||||
|
Connect to url and return the WebSocket object.
|
||||||
|
Passing optional timeout parameter will set the timeout on the socket.
|
||||||
|
If no timeout is supplied,
|
||||||
|
the global default timeout setting returned by getdefauttimeout() is used.
|
||||||
|
You can customize using 'options'.
|
||||||
|
If you set "header" list object, you can set your own custom header.
|
||||||
|
|
||||||
|
>>> conn = create_connection("ws://echo.websocket.org/",
|
||||||
|
... header=["User-Agent: MyProgram",
|
||||||
|
... "x-custom: header"])
|
||||||
|
|
||||||
|
|
||||||
|
timeout: socket timeout time. This value is integer.
|
||||||
|
if you set None for this value,
|
||||||
|
it means "use default_timeout value"
|
||||||
|
|
||||||
|
|
||||||
|
options: "header" -> custom http header list.
|
||||||
|
"cookie" -> cookie value.
|
||||||
|
"origin" -> custom origin url.
|
||||||
|
"host" -> custom host header string.
|
||||||
|
"http_proxy_host" - http proxy host name.
|
||||||
|
"http_proxy_port" - http proxy port. If not set, set to 80.
|
||||||
|
"http_no_proxy" - host names, which doesn't use proxy.
|
||||||
|
"http_proxy_auth" - http proxy auth infomation.
|
||||||
|
tuple of username and password.
|
||||||
|
default is None
|
||||||
|
"enable_multithread" -> enable lock for multithread.
|
||||||
|
"sockopt" -> socket options
|
||||||
|
"sslopt" -> ssl option
|
||||||
|
"subprotocols" - array of available sub protocols.
|
||||||
|
default is None.
|
||||||
|
"skip_utf8_validation" - skip utf8 validation.
|
||||||
|
"""
|
||||||
|
sockopt = options.get("sockopt", [])
|
||||||
|
sslopt = options.get("sslopt", {})
|
||||||
|
fire_cont_frame = options.get("fire_cont_frame", False)
|
||||||
|
enable_multithread = options.get("enable_multithread", False)
|
||||||
|
skip_utf8_validation = options.get("skip_utf8_validation", False)
|
||||||
|
websock = WebSocket(sockopt=sockopt, sslopt=sslopt,
|
||||||
|
fire_cont_frame=fire_cont_frame,
|
||||||
|
enable_multithread=enable_multithread,
|
||||||
|
skip_utf8_validation=skip_utf8_validation)
|
||||||
|
websock.settimeout(timeout if timeout is not None else getdefaulttimeout())
|
||||||
|
websock.connect(url, **options)
|
||||||
|
return websock
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocket(object):
|
||||||
|
"""
|
||||||
|
Low level WebSocket interface.
|
||||||
|
This class is based on
|
||||||
|
The WebSocket protocol draft-hixie-thewebsocketprotocol-76
|
||||||
|
http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
|
||||||
|
|
||||||
|
We can connect to the websocket server and send/recieve data.
|
||||||
|
The following example is a echo client.
|
||||||
|
|
||||||
|
>>> import websocket
|
||||||
|
>>> ws = websocket.WebSocket()
|
||||||
|
>>> ws.connect("ws://echo.websocket.org")
|
||||||
|
>>> ws.send("Hello, Server")
|
||||||
|
>>> ws.recv()
|
||||||
|
'Hello, Server'
|
||||||
|
>>> ws.close()
|
||||||
|
|
||||||
|
get_mask_key: a callable to produce new mask keys, see the set_mask_key
|
||||||
|
function's docstring for more details
|
||||||
|
sockopt: values for socket.setsockopt.
|
||||||
|
sockopt must be tuple and each element is argument of sock.setscokopt.
|
||||||
|
sslopt: dict object for ssl socket option.
|
||||||
|
fire_cont_frame: fire recv event for each cont frame. default is False
|
||||||
|
enable_multithread: if set to True, lock send method.
|
||||||
|
skip_utf8_validation: skip utf8 validation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, get_mask_key=None, sockopt=None, sslopt=None,
|
||||||
|
fire_cont_frame=False, enable_multithread=False,
|
||||||
|
skip_utf8_validation=False):
|
||||||
|
"""
|
||||||
|
Initialize WebSocket object.
|
||||||
|
"""
|
||||||
|
self.sock_opt = sock_opt(sockopt, sslopt)
|
||||||
|
self.handshake_response = None
|
||||||
|
self.sock = None
|
||||||
|
|
||||||
|
self.connected = False
|
||||||
|
self.get_mask_key = get_mask_key
|
||||||
|
# These buffer over the build-up of a single frame.
|
||||||
|
self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation)
|
||||||
|
self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation)
|
||||||
|
|
||||||
|
if enable_multithread:
|
||||||
|
self.lock = threading.Lock()
|
||||||
|
else:
|
||||||
|
self.lock = NoLock()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
"""
|
||||||
|
Allow iteration over websocket, implying sequential `recv` executions.
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
yield self.recv()
|
||||||
|
|
||||||
|
def __next__(self):
|
||||||
|
return self.recv()
|
||||||
|
|
||||||
|
def next(self):
|
||||||
|
return self.__next__()
|
||||||
|
|
||||||
|
def fileno(self):
|
||||||
|
return self.sock.fileno()
|
||||||
|
|
||||||
|
def set_mask_key(self, func):
|
||||||
|
"""
|
||||||
|
Set the function used to create a mask key. You can customize the mask key generator.
Mainly, this is for testing purposes.
|
||||||
|
|
||||||
|
func: callable object. The function takes one integer argument,
which is the length of the mask key.
It must return a string (byte array)
of the specified length.
|
||||||
|
"""
|
||||||
|
self.get_mask_key = func
|
||||||
|
|
||||||
|
def gettimeout(self):
|
||||||
|
"""
|
||||||
|
Get the websocket timeout (seconds).
|
||||||
|
"""
|
||||||
|
return self.sock_opt.timeout
|
||||||
|
|
||||||
|
def settimeout(self, timeout):
|
||||||
|
"""
|
||||||
|
Set the timeout to the websocket.
|
||||||
|
|
||||||
|
timeout: timeout time(second).
|
||||||
|
"""
|
||||||
|
self.sock_opt.timeout = timeout
|
||||||
|
if self.sock:
|
||||||
|
self.sock.settimeout(timeout)
|
||||||
|
|
||||||
|
timeout = property(gettimeout, settimeout)
|
||||||
|
|
||||||
|
def getsubprotocol(self):
|
||||||
|
"""
|
||||||
|
get subprotocol
|
||||||
|
"""
|
||||||
|
if self.handshake_response:
|
||||||
|
return self.handshake_response.subprotocol
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
subprotocol = property(getsubprotocol)
|
||||||
|
|
||||||
|
def getstatus(self):
|
||||||
|
"""
|
||||||
|
get handshake status
|
||||||
|
"""
|
||||||
|
if self.handshake_response:
|
||||||
|
return self.handshake_response.status
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
status = property(getstatus)
|
||||||
|
|
||||||
|
def getheaders(self):
|
||||||
|
"""
|
||||||
|
get handshake response header
|
||||||
|
"""
|
||||||
|
if self.handshake_response:
|
||||||
|
return self.handshake_response.headers
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
headers = property(getheaders)
|
||||||
|
|
||||||
|
def connect(self, url, **options):
|
||||||
|
"""
|
||||||
|
Connect to url. url is websocket url scheme.
|
||||||
|
ie. ws://host:port/resource
|
||||||
|
You can customize using 'options'.
|
||||||
|
If you set "header" list object, you can set your own custom header.
|
||||||
|
|
||||||
|
>>> ws = WebSocket()
|
||||||
|
>>> ws.connect("ws://echo.websocket.org/",
|
||||||
|
... header=["User-Agent: MyProgram",
|
||||||
|
... "x-custom: header"])
|
||||||
|
|
||||||
|
timeout: socket timeout time. This value is integer.
|
||||||
|
if you set None for this value,
|
||||||
|
it means "use default_timeout value"
|
||||||
|
|
||||||
|
options: "header" -> custom http header list.
|
||||||
|
"cookie" -> cookie value.
|
||||||
|
"origin" -> custom origin url.
|
||||||
|
"host" -> custom host header string.
|
||||||
|
"http_proxy_host" - http proxy host name.
|
||||||
|
"http_proxy_port" - http proxy port. If not set, set to 80.
|
||||||
|
"http_no_proxy" - host names, which doesn't use proxy.
|
||||||
|
"http_proxy_auth" - http proxy auth infomation.
|
||||||
|
tuple of username and password.
|
||||||
|
defualt is None
|
||||||
|
"subprotocols" - array of available sub protocols.
|
||||||
|
default is None.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options))
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.handshake_response = handshake(self.sock, *addrs, **options)
|
||||||
|
self.connected = True
|
||||||
|
except:
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
self.sock = None
|
||||||
|
raise
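The same options can be passed to the lower-level WebSocket.connect() directly; a minimal sketch, using a placeholder echo URL and custom header, that also reads back the handshake status and headers exposed by the properties above:

import websocket

# Sketch of the low-level interface; URL and custom header are placeholders.
ws = websocket.WebSocket(enable_multithread=True)
ws.settimeout(10)
ws.connect("ws://echo.websocket.org/", header=["x-custom: header"])
print(ws.status)        # handshake status, normally 101
print(ws.getheaders())  # handshake response headers
ws.close()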
|
||||||
|
|
||||||
|
def send(self, payload, opcode=ABNF.OPCODE_TEXT):
|
||||||
|
"""
|
||||||
|
Send the data as string.
|
||||||
|
|
||||||
|
payload: Payload must be utf-8 string or unicode,
|
||||||
|
if the opcode is OPCODE_TEXT.
|
||||||
|
Otherwise, it must be string(byte array)
|
||||||
|
|
||||||
|
opcode: operation code to send. Please see OPCODE_XXX.
|
||||||
|
"""
|
||||||
|
|
||||||
|
frame = ABNF.create_frame(payload, opcode)
|
||||||
|
return self.send_frame(frame)
|
||||||
|
|
||||||
|
def send_frame(self, frame):
|
||||||
|
"""
|
||||||
|
Send the data frame.
|
||||||
|
|
||||||
|
frame: frame data created by ABNF.create_frame
|
||||||
|
|
||||||
|
>>> ws = create_connection("ws://echo.websocket.org/")
|
||||||
|
>>> frame = ABNF.create_frame("Hello", ABNF.OPCODE_TEXT)
|
||||||
|
>>> ws.send_frame(frame)
|
||||||
|
>>> cont_frame = ABNF.create_frame("My name is ", ABNF.OPCODE_CONT, 0)
|
||||||
|
>>> ws.send_frame(frame)
|
||||||
|
>>> cont_frame = ABNF.create_frame("Foo Bar", ABNF.OPCODE_CONT, 1)
|
||||||
|
>>> ws.send_frame(frame)
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self.get_mask_key:
|
||||||
|
frame.get_mask_key = self.get_mask_key
|
||||||
|
data = frame.format()
|
||||||
|
length = len(data)
|
||||||
|
trace("send: " + repr(data))
|
||||||
|
|
||||||
|
with self.lock:
|
||||||
|
while data:
|
||||||
|
l = self._send(data)
|
||||||
|
data = data[l:]
|
||||||
|
|
||||||
|
return length
|
||||||
|
|
||||||
|
def send_binary(self, payload):
|
||||||
|
return self.send(payload, ABNF.OPCODE_BINARY)
|
||||||
|
|
||||||
|
def ping(self, payload=""):
|
||||||
|
"""
|
||||||
|
send ping data.
|
||||||
|
|
||||||
|
payload: data payload to send server.
|
||||||
|
"""
|
||||||
|
if isinstance(payload, six.text_type):
|
||||||
|
payload = payload.encode("utf-8")
|
||||||
|
self.send(payload, ABNF.OPCODE_PING)
|
||||||
|
|
||||||
|
def pong(self, payload):
|
||||||
|
"""
|
||||||
|
send pong data.
|
||||||
|
|
||||||
|
payload: data payload to send server.
|
||||||
|
"""
|
||||||
|
if isinstance(payload, six.text_type):
|
||||||
|
payload = payload.encode("utf-8")
|
||||||
|
self.send(payload, ABNF.OPCODE_PONG)
|
||||||
|
|
||||||
|
def recv(self):
|
||||||
|
"""
|
||||||
|
Receive string data(byte array) from the server.
|
||||||
|
|
||||||
|
return value: string(byte array) value.
|
||||||
|
"""
|
||||||
|
opcode, data = self.recv_data()
|
||||||
|
if six.PY3 and opcode == ABNF.OPCODE_TEXT:
|
||||||
|
return data.decode("utf-8")
|
||||||
|
elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY:
|
||||||
|
return data
|
||||||
|
else:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def recv_data(self, control_frame=False):
|
||||||
|
"""
|
||||||
|
Receive data with operation code.
|
||||||
|
|
||||||
|
control_frame: a boolean flag indicating whether to return control frame
|
||||||
|
data, defaults to False
|
||||||
|
|
||||||
|
return value: tuple of operation code and string(byte array) value.
|
||||||
|
"""
|
||||||
|
opcode, frame = self.recv_data_frame(control_frame)
|
||||||
|
return opcode, frame.data
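Because recv() collapses text and binary frames into a single return value, callers that care about the frame type can use recv_data() and inspect the opcode; a short sketch (the echo URL is a placeholder):

from websocket import ABNF, create_connection

# Sketch: recv_data() returns (opcode, payload) so text and binary frames
# can be told apart explicitly.
ws = create_connection("ws://echo.websocket.org/")
ws.send_binary(b"\x00\x01\x02")
opcode, data = ws.recv_data()
if opcode == ABNF.OPCODE_BINARY:
    print("binary frame:", data)
elif opcode == ABNF.OPCODE_TEXT:
    print("text frame:", data)
ws.close()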
|
||||||
|
|
||||||
|
def recv_data_frame(self, control_frame=False):
|
||||||
|
"""
|
||||||
|
Receive data with operation code.
|
||||||
|
|
||||||
|
control_frame: a boolean flag indicating whether to return control frame
|
||||||
|
data, defaults to False
|
||||||
|
|
||||||
|
return value: tuple of operation code and string(byte array) value.
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
frame = self.recv_frame()
|
||||||
|
if not frame:
|
||||||
|
# handle error:
|
||||||
|
# 'NoneType' object has no attribute 'opcode'
|
||||||
|
raise WebSocketProtocolException("Not a valid frame %s" % frame)
|
||||||
|
elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT):
|
||||||
|
self.cont_frame.validate(frame)
|
||||||
|
self.cont_frame.add(frame)
|
||||||
|
|
||||||
|
if self.cont_frame.is_fire(frame):
|
||||||
|
return self.cont_frame.extract(frame)
|
||||||
|
|
||||||
|
elif frame.opcode == ABNF.OPCODE_CLOSE:
|
||||||
|
self.send_close()
|
||||||
|
return (frame.opcode, frame)
|
||||||
|
elif frame.opcode == ABNF.OPCODE_PING:
|
||||||
|
if len(frame.data) < 126:
|
||||||
|
self.pong(frame.data)
|
||||||
|
else:
|
||||||
|
raise WebSocketProtocolException("Ping message is too long")
|
||||||
|
if control_frame:
|
||||||
|
return (frame.opcode, frame)
|
||||||
|
elif frame.opcode == ABNF.OPCODE_PONG:
|
||||||
|
if control_frame:
|
||||||
|
return (frame.opcode, frame)
|
||||||
|
|
||||||
|
def recv_frame(self):
|
||||||
|
"""
|
||||||
|
Receive data as a frame from the server.
|
||||||
|
|
||||||
|
return value: ABNF frame object.
|
||||||
|
"""
|
||||||
|
return self.frame_buffer.recv_frame()
|
||||||
|
|
||||||
|
def send_close(self, status=STATUS_NORMAL, reason=six.b("")):
|
||||||
|
"""
|
||||||
|
send close data to the server.
|
||||||
|
|
||||||
|
status: status code to send. see STATUS_XXX.
|
||||||
|
|
||||||
|
reason: the reason to close. This must be string or bytes.
|
||||||
|
"""
|
||||||
|
if status < 0 or status >= ABNF.LENGTH_16:
|
||||||
|
raise ValueError("code is invalid range")
|
||||||
|
self.connected = False
|
||||||
|
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
|
||||||
|
|
||||||
|
def close(self, status=STATUS_NORMAL, reason=six.b("")):
|
||||||
|
"""
|
||||||
|
Close Websocket object
|
||||||
|
|
||||||
|
status: status code to send. see STATUS_XXX.
|
||||||
|
|
||||||
|
reason: the reason to close. This must be string.
|
||||||
|
"""
|
||||||
|
if self.connected:
|
||||||
|
if status < 0 or status >= ABNF.LENGTH_16:
|
||||||
|
raise ValueError("code is invalid range")
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.connected = False
|
||||||
|
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
|
||||||
|
timeout = self.sock.gettimeout()
|
||||||
|
self.sock.settimeout(3)
|
||||||
|
try:
|
||||||
|
frame = self.recv_frame()
|
||||||
|
if isEnabledForError():
|
||||||
|
recv_status = struct.unpack("!H", frame.data)[0]
|
||||||
|
if recv_status != STATUS_NORMAL:
|
||||||
|
error("close status: " + repr(recv_status))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
self.sock.settimeout(timeout)
|
||||||
|
self.sock.shutdown(socket.SHUT_RDWR)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.shutdown()
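A sketch of a graceful shutdown that sends a non-default close status, mirroring the struct-packed close frame built by close() and send_close() above (the URL and reason are illustrative):

import six
import websocket

# Sketch: close with an explicit status code and reason.
ws = websocket.WebSocket()
ws.connect("ws://echo.websocket.org/")             # placeholder URL
ws.close(status=1001, reason=six.b("going away"))  # 1001 = "going away"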
|
||||||
|
|
||||||
|
def abort(self):
|
||||||
|
"""
|
||||||
|
Low-level asynchronous abort; wakes up other threads that are waiting in recv_*
|
||||||
|
"""
|
||||||
|
if self.connected:
|
||||||
|
self.sock.shutdown(socket.SHUT_RDWR)
|
||||||
|
|
||||||
|
def shutdown(self):
|
||||||
|
"close socket, immediately."
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
self.sock = None
|
||||||
|
self.connected = False
|
||||||
|
|
||||||
|
def _send(self, data):
|
||||||
|
return send(self.sock, data)
|
||||||
|
|
||||||
|
def _recv(self, bufsize):
|
||||||
|
try:
|
||||||
|
return recv(self.sock, bufsize)
|
||||||
|
except WebSocketConnectionClosedException:
|
||||||
|
if self.sock:
|
||||||
|
self.sock.close()
|
||||||
|
self.sock = None
|
||||||
|
self.connected = False
|
||||||
|
raise
|
65
lib/websocket/_exceptions.py
Normal file
@@ -0,0 +1,65 @@
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
define websocket exceptions
|
||||||
|
"""
|
||||||
|
|
||||||
|
class WebSocketException(Exception):
|
||||||
|
"""
|
||||||
|
websocket exception class.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketProtocolException(WebSocketException):
|
||||||
|
"""
|
||||||
|
If the websocket protocol is invalid, this exception will be raised.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketPayloadException(WebSocketException):
|
||||||
|
"""
|
||||||
|
If the websocket payload is invalid, this exception will be raised.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketConnectionClosedException(WebSocketException):
|
||||||
|
"""
|
||||||
|
If remote host closed the connection or some network error happened,
|
||||||
|
this exception will be raised.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketTimeoutException(WebSocketException):
|
||||||
|
"""
|
||||||
|
WebSocketTimeoutException will be raised at socket timeout during read/write data.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class WebSocketProxyException(WebSocketException):
|
||||||
|
"""
|
||||||
|
WebSocketProxyException will be raised when a proxy error occurred.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
155
lib/websocket/_handshake.py
Normal file
@@ -0,0 +1,155 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import six
|
||||||
|
if six.PY3:
|
||||||
|
from base64 import encodebytes as base64encode
|
||||||
|
else:
|
||||||
|
from base64 import encodestring as base64encode
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
from ._logging import *
|
||||||
|
from ._url import *
|
||||||
|
from ._socket import *
|
||||||
|
from ._http import *
|
||||||
|
from ._exceptions import *
|
||||||
|
|
||||||
|
__all__ = ["handshake_response", "handshake"]
|
||||||
|
|
||||||
|
# websocket supported version.
|
||||||
|
VERSION = 13
|
||||||
|
|
||||||
|
|
||||||
|
class handshake_response(object):
|
||||||
|
def __init__(self, status, headers, subprotocol):
|
||||||
|
self.status = status
|
||||||
|
self.headers = headers
|
||||||
|
self.subprotocol = subprotocol
|
||||||
|
|
||||||
|
|
||||||
|
def handshake(sock, hostname, port, resource, **options):
|
||||||
|
headers, key = _get_handshake_headers(resource, hostname, port, options)
|
||||||
|
|
||||||
|
header_str = "\r\n".join(headers)
|
||||||
|
send(sock, header_str)
|
||||||
|
dump("request header", header_str)
|
||||||
|
|
||||||
|
status, resp = _get_resp_headers(sock)
|
||||||
|
success, subproto = _validate(resp, key, options.get("subprotocols"))
|
||||||
|
if not success:
|
||||||
|
raise WebSocketException("Invalid WebSocket Header")
|
||||||
|
|
||||||
|
return handshake_response(status, resp, subproto)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_handshake_headers(resource, host, port, options):
|
||||||
|
headers = []
|
||||||
|
headers.append("GET %s HTTP/1.1" % resource)
|
||||||
|
headers.append("Upgrade: websocket")
|
||||||
|
headers.append("Connection: Upgrade")
|
||||||
|
if port == 80:
|
||||||
|
hostport = host
|
||||||
|
else:
|
||||||
|
hostport = "%s:%d" % (host, port)
|
||||||
|
|
||||||
|
if "host" in options and options["host"]:
|
||||||
|
headers.append("Host: %s" % options["host"])
|
||||||
|
else:
|
||||||
|
headers.append("Host: %s" % hostport)
|
||||||
|
|
||||||
|
if "origin" in options and options["origin"]:
|
||||||
|
headers.append("Origin: %s" % options["origin"])
|
||||||
|
else:
|
||||||
|
headers.append("Origin: http://%s" % hostport)
|
||||||
|
|
||||||
|
key = _create_sec_websocket_key()
|
||||||
|
headers.append("Sec-WebSocket-Key: %s" % key)
|
||||||
|
headers.append("Sec-WebSocket-Version: %s" % VERSION)
|
||||||
|
|
||||||
|
subprotocols = options.get("subprotocols")
|
||||||
|
if subprotocols:
|
||||||
|
headers.append("Sec-WebSocket-Protocol: %s" % ",".join(subprotocols))
|
||||||
|
|
||||||
|
if "header" in options:
|
||||||
|
headers.extend(options["header"])
|
||||||
|
|
||||||
|
cookie = options.get("cookie", None)
|
||||||
|
|
||||||
|
if cookie:
|
||||||
|
headers.append("Cookie: %s" % cookie)
|
||||||
|
|
||||||
|
headers.append("")
|
||||||
|
headers.append("")
|
||||||
|
|
||||||
|
return headers, key
|
||||||
|
|
||||||
|
|
||||||
|
def _get_resp_headers(sock, success_status=101):
|
||||||
|
status, resp_headers = read_headers(sock)
|
||||||
|
if status != success_status:
|
||||||
|
raise WebSocketException("Handshake status %d" % status)
|
||||||
|
return status, resp_headers
|
||||||
|
|
||||||
|
_HEADERS_TO_CHECK = {
|
||||||
|
"upgrade": "websocket",
|
||||||
|
"connection": "upgrade",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _validate(headers, key, subprotocols):
|
||||||
|
subproto = None
|
||||||
|
for k, v in _HEADERS_TO_CHECK.items():
|
||||||
|
r = headers.get(k, None)
|
||||||
|
if not r:
|
||||||
|
return False, None
|
||||||
|
r = r.lower()
|
||||||
|
if v != r:
|
||||||
|
return False, None
|
||||||
|
|
||||||
|
if subprotocols:
|
||||||
|
subproto = headers.get("sec-websocket-protocol", None).lower()
|
||||||
|
if not subproto or subproto not in [s.lower() for s in subprotocols]:
|
||||||
|
error("Invalid subprotocol: " + str(subprotocols))
|
||||||
|
return False, None
|
||||||
|
|
||||||
|
result = headers.get("sec-websocket-accept", None)
|
||||||
|
if not result:
|
||||||
|
return False, None
|
||||||
|
result = result.lower()
|
||||||
|
|
||||||
|
if isinstance(result, six.text_type):
|
||||||
|
result = result.encode('utf-8')
|
||||||
|
|
||||||
|
value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8')
|
||||||
|
hashed = base64encode(hashlib.sha1(value).digest()).strip().lower()
|
||||||
|
success = (hashed == result)
|
||||||
|
if success:
|
||||||
|
return True, subproto
|
||||||
|
else:
|
||||||
|
return False, None
|
||||||
|
|
||||||
|
|
||||||
|
def _create_sec_websocket_key():
|
||||||
|
uid = uuid.uuid4()
|
||||||
|
return base64encode(uid.bytes).decode('utf-8').strip()
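The accept-key check in _validate() follows RFC 6455: the server must return base64(sha1(key + magic GUID)). A standalone sketch using the sample key from the RFC reproduces the expected value:

import hashlib
from base64 import b64encode

# Sec-WebSocket-Accept computation as performed in _validate(); the key is
# the sample value from RFC 6455.
key = "dGhlIHNhbXBsZSBub25jZQ=="
magic = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
accept = b64encode(hashlib.sha1((key + magic).encode("utf-8")).digest()).decode("utf-8")
print(accept)  # s3pPLMBiTxaQ9kYGzzhZRbK+xOo=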
|
215
lib/websocket/_http.py
Normal file
@@ -0,0 +1,215 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import six
|
||||||
|
import socket
|
||||||
|
import errno
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
from base64 import encodebytes as base64encode
|
||||||
|
else:
|
||||||
|
from base64 import encodestring as base64encode
|
||||||
|
|
||||||
|
from ._logging import *
|
||||||
|
from ._url import *
|
||||||
|
from ._socket import *
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._ssl_compat import *
|
||||||
|
|
||||||
|
__all__ = ["proxy_info", "connect", "read_headers"]
|
||||||
|
|
||||||
|
class proxy_info(object):
|
||||||
|
def __init__(self, **options):
|
||||||
|
self.host = options.get("http_proxy_host", None)
|
||||||
|
if self.host:
|
||||||
|
self.port = options.get("http_proxy_port", 0)
|
||||||
|
self.auth = options.get("http_proxy_auth", None)
|
||||||
|
self.no_proxy = options.get("http_no_proxy", None)
|
||||||
|
else:
|
||||||
|
self.port = 0
|
||||||
|
self.auth = None
|
||||||
|
self.no_proxy = None
|
||||||
|
|
||||||
|
def connect(url, options, proxy):
|
||||||
|
hostname, port, resource, is_secure = parse_url(url)
|
||||||
|
addrinfo_list, need_tunnel, auth = _get_addrinfo_list(hostname, port, is_secure, proxy)
|
||||||
|
if not addrinfo_list:
|
||||||
|
raise WebSocketException(
|
||||||
|
"Host not found.: " + hostname + ":" + str(port))
|
||||||
|
|
||||||
|
sock = None
|
||||||
|
try:
|
||||||
|
sock = _open_socket(addrinfo_list, options.sockopt, options.timeout)
|
||||||
|
if need_tunnel:
|
||||||
|
sock = _tunnel(sock, hostname, port, auth)
|
||||||
|
|
||||||
|
if is_secure:
|
||||||
|
if HAVE_SSL:
|
||||||
|
sock = _ssl_socket(sock, options.sslopt, hostname)
|
||||||
|
else:
|
||||||
|
raise WebSocketException("SSL not available.")
|
||||||
|
|
||||||
|
return sock, (hostname, port, resource)
|
||||||
|
except:
|
||||||
|
if sock:
|
||||||
|
sock.close()
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
def _get_addrinfo_list(hostname, port, is_secure, proxy):
|
||||||
|
phost, pport, pauth = get_proxy_info(hostname, is_secure,
|
||||||
|
proxy.host, proxy.port, proxy.auth, proxy.no_proxy)
|
||||||
|
if not phost:
|
||||||
|
addrinfo_list = socket.getaddrinfo(hostname, port, 0, 0, socket.SOL_TCP)
|
||||||
|
return addrinfo_list, False, None
|
||||||
|
else:
|
||||||
|
pport = pport and pport or 80
|
||||||
|
addrinfo_list = socket.getaddrinfo(phost, pport, 0, 0, socket.SOL_TCP)
|
||||||
|
return addrinfo_list, True, pauth
|
||||||
|
|
||||||
|
|
||||||
|
def _open_socket(addrinfo_list, sockopt, timeout):
|
||||||
|
err = None
|
||||||
|
for addrinfo in addrinfo_list:
|
||||||
|
family = addrinfo[0]
|
||||||
|
sock = socket.socket(family)
|
||||||
|
sock.settimeout(timeout)
|
||||||
|
for opts in DEFAULT_SOCKET_OPTION:
|
||||||
|
sock.setsockopt(*opts)
|
||||||
|
for opts in sockopt:
|
||||||
|
sock.setsockopt(*opts)
|
||||||
|
|
||||||
|
address = addrinfo[4]
|
||||||
|
try:
|
||||||
|
sock.connect(address)
|
||||||
|
except socket.error as error:
|
||||||
|
error.remote_ip = str(address[0])
|
||||||
|
if error.errno in (errno.ECONNREFUSED, ):
|
||||||
|
err = error
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
raise err
|
||||||
|
|
||||||
|
return sock
|
||||||
|
|
||||||
|
|
||||||
|
def _can_use_sni():
|
||||||
|
return (six.PY2 and sys.version_info[1] >= 7 and sys.version_info[2] >= 9) or (six.PY3 and sys.version_info[2] >= 2)
|
||||||
|
|
||||||
|
|
||||||
|
def _wrap_sni_socket(sock, sslopt, hostname, check_hostname):
|
||||||
|
context = ssl.SSLContext(sslopt.get('ssl_version', ssl.PROTOCOL_SSLv23))
|
||||||
|
|
||||||
|
context.load_verify_locations(cafile=sslopt.get('ca_certs', None))
|
||||||
|
# see https://github.com/liris/websocket-client/commit/b96a2e8fa765753e82eea531adb19716b52ca3ca#commitcomment-10803153
|
||||||
|
context.verify_mode = sslopt['cert_reqs']
|
||||||
|
if HAVE_CONTEXT_CHECK_HOSTNAME:
|
||||||
|
context.check_hostname = check_hostname
|
||||||
|
if 'ciphers' in sslopt:
|
||||||
|
context.set_ciphers(sslopt['ciphers'])
|
||||||
|
|
||||||
|
return context.wrap_socket(
|
||||||
|
sock,
|
||||||
|
do_handshake_on_connect=sslopt.get('do_handshake_on_connect', True),
|
||||||
|
suppress_ragged_eofs=sslopt.get('suppress_ragged_eofs', True),
|
||||||
|
server_hostname=hostname,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _ssl_socket(sock, user_sslopt, hostname):
|
||||||
|
sslopt = dict(cert_reqs=ssl.CERT_REQUIRED)
|
||||||
|
certPath = os.path.join(
|
||||||
|
os.path.dirname(__file__), "cacert.pem")
|
||||||
|
if os.path.isfile(certPath):
|
||||||
|
sslopt['ca_certs'] = certPath
|
||||||
|
sslopt.update(user_sslopt)
|
||||||
|
check_hostname = sslopt["cert_reqs"] != ssl.CERT_NONE and sslopt.pop('check_hostname', True)
|
||||||
|
|
||||||
|
if _can_use_sni():
|
||||||
|
sock = _wrap_sni_socket(sock, sslopt, hostname, check_hostname)
|
||||||
|
else:
|
||||||
|
sslopt.pop('check_hostname', True)
|
||||||
|
sock = ssl.wrap_socket(sock, **sslopt)
|
||||||
|
|
||||||
|
if not HAVE_CONTEXT_CHECK_HOSTNAME and check_hostname:
|
||||||
|
match_hostname(sock.getpeercert(), hostname)
|
||||||
|
|
||||||
|
return sock
|
||||||
|
|
||||||
|
def _tunnel(sock, host, port, auth):
|
||||||
|
debug("Connecting proxy...")
|
||||||
|
connect_header = "CONNECT %s:%d HTTP/1.0\r\n" % (host, port)
|
||||||
|
# TODO: support digest auth.
|
||||||
|
if auth and auth[0]:
|
||||||
|
auth_str = auth[0]
|
||||||
|
if auth[1]:
|
||||||
|
auth_str += ":" + auth[1]
|
||||||
|
encoded_str = base64encode(auth_str.encode()).strip().decode()
|
||||||
|
connect_header += "Proxy-Authorization: Basic %s\r\n" % encoded_str
|
||||||
|
connect_header += "\r\n"
|
||||||
|
dump("request header", connect_header)
|
||||||
|
|
||||||
|
send(sock, connect_header)
|
||||||
|
|
||||||
|
try:
|
||||||
|
status, resp_headers = read_headers(sock)
|
||||||
|
except Exception as e:
|
||||||
|
raise WebSocketProxyException(str(e))
|
||||||
|
|
||||||
|
if status != 200:
|
||||||
|
raise WebSocketProxyException(
|
||||||
|
"failed CONNECT via proxy status: %r" + status)
|
||||||
|
|
||||||
|
return sock
|
||||||
|
|
||||||
|
def read_headers(sock):
|
||||||
|
status = None
|
||||||
|
headers = {}
|
||||||
|
trace("--- response header ---")
|
||||||
|
|
||||||
|
while True:
|
||||||
|
line = recv_line(sock)
|
||||||
|
line = line.decode('utf-8').strip()
|
||||||
|
if not line:
|
||||||
|
break
|
||||||
|
trace(line)
|
||||||
|
if not status:
|
||||||
|
|
||||||
|
status_info = line.split(" ", 2)
|
||||||
|
status = int(status_info[1])
|
||||||
|
else:
|
||||||
|
kv = line.split(":", 1)
|
||||||
|
if len(kv) == 2:
|
||||||
|
key, value = kv
|
||||||
|
headers[key.lower()] = value.strip().lower()
|
||||||
|
else:
|
||||||
|
raise WebSocketException("Invalid header")
|
||||||
|
|
||||||
|
trace("-----------------------")
|
||||||
|
|
||||||
|
return status, headers
|
71
lib/websocket/_logging.py
Normal file
@@ -0,0 +1,71 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
_logger = logging.getLogger()
|
||||||
|
_traceEnabled = False
|
||||||
|
|
||||||
|
__all__ = ["enableTrace", "dump", "error", "debug", "trace",
|
||||||
|
"isEnabledForError", "isEnabledForDebug"]
|
||||||
|
|
||||||
|
|
||||||
|
def enableTrace(tracable):
|
||||||
|
"""
|
||||||
|
turn on/off the traceability.

tracable: boolean value. If set True, traceability is enabled.
|
||||||
|
"""
|
||||||
|
global _traceEnabled
|
||||||
|
_traceEnabled = tracable
|
||||||
|
if tracable:
|
||||||
|
if not _logger.handlers:
|
||||||
|
_logger.addHandler(logging.StreamHandler())
|
||||||
|
_logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
|
||||||
|
def dump(title, message):
|
||||||
|
if _traceEnabled:
|
||||||
|
_logger.debug("--- " + title + " ---")
|
||||||
|
_logger.debug(message)
|
||||||
|
_logger.debug("-----------------------")
|
||||||
|
|
||||||
|
|
||||||
|
def error(msg):
|
||||||
|
_logger.error(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def debug(msg):
|
||||||
|
_logger.debug(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def trace(msg):
|
||||||
|
if _traceEnabled:
|
||||||
|
_logger.debug(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def isEnabledForError():
|
||||||
|
return _logger.isEnabledFor(logging.ERROR)
|
||||||
|
|
||||||
|
|
||||||
|
def isEnabledForDebug():
|
||||||
|
return _logger.isEnabledFor(logging.DEBUG)
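enableTrace() simply attaches a StreamHandler and raises the logger to DEBUG, which makes the dump()/trace() output of request and response headers visible; a short sketch (placeholder URL):

import websocket

# Sketch: turn on header/frame tracing before opening a connection.
websocket.enableTrace(True)
ws = websocket.create_connection("ws://echo.websocket.org/")
ws.send("ping?")
ws.recv()
ws.close()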
|
121
lib/websocket/_socket.py
Normal file
@@ -0,0 +1,121 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import socket
|
||||||
|
import six
|
||||||
|
|
||||||
|
from ._exceptions import *
|
||||||
|
from ._utils import *
|
||||||
|
from ._ssl_compat import *
|
||||||
|
|
||||||
|
DEFAULT_SOCKET_OPTION = [(socket.SOL_TCP, socket.TCP_NODELAY, 1)]
|
||||||
|
if hasattr(socket, "SO_KEEPALIVE"):
|
||||||
|
DEFAULT_SOCKET_OPTION.append((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1))
|
||||||
|
if hasattr(socket, "TCP_KEEPIDLE"):
|
||||||
|
DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPIDLE, 30))
|
||||||
|
if hasattr(socket, "TCP_KEEPINTVL"):
|
||||||
|
DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPINTVL, 10))
|
||||||
|
if hasattr(socket, "TCP_KEEPCNT"):
|
||||||
|
DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPCNT, 3))
|
||||||
|
|
||||||
|
_default_timeout = None
|
||||||
|
|
||||||
|
__all__ = ["DEFAULT_SOCKET_OPTION", "sock_opt", "setdefaulttimeout", "getdefaulttimeout",
|
||||||
|
"recv", "recv_line", "send"]
|
||||||
|
|
||||||
|
class sock_opt(object):
|
||||||
|
def __init__(self, sockopt, sslopt):
|
||||||
|
if sockopt is None:
|
||||||
|
sockopt = []
|
||||||
|
if sslopt is None:
|
||||||
|
sslopt = {}
|
||||||
|
self.sockopt = sockopt
|
||||||
|
self.sslopt = sslopt
|
||||||
|
self.timeout = None
|
||||||
|
|
||||||
|
def setdefaulttimeout(timeout):
|
||||||
|
"""
|
||||||
|
Set the global timeout setting to connect.
|
||||||
|
|
||||||
|
timeout: default socket timeout time. This value is in seconds.
|
||||||
|
"""
|
||||||
|
global _default_timeout
|
||||||
|
_default_timeout = timeout
|
||||||
|
|
||||||
|
|
||||||
|
def getdefaulttimeout():
|
||||||
|
"""
|
||||||
|
Return the global timeout setting (seconds) to connect.
|
||||||
|
"""
|
||||||
|
return _default_timeout
|
||||||
|
|
||||||
|
|
||||||
|
def recv(sock, bufsize):
|
||||||
|
if not sock:
|
||||||
|
raise WebSocketConnectionClosedException("socket is already closed.")
|
||||||
|
|
||||||
|
try:
|
||||||
|
bytes = sock.recv(bufsize)
|
||||||
|
except socket.timeout as e:
|
||||||
|
message = extract_err_message(e)
|
||||||
|
raise WebSocketTimeoutException(message)
|
||||||
|
except SSLError as e:
|
||||||
|
message = extract_err_message(e)
|
||||||
|
if message == "The read operation timed out":
|
||||||
|
raise WebSocketTimeoutException(message)
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
if not bytes:
|
||||||
|
raise WebSocketConnectionClosedException("Connection is already closed.")
|
||||||
|
|
||||||
|
return bytes
|
||||||
|
|
||||||
|
|
||||||
|
def recv_line(sock):
|
||||||
|
line = []
|
||||||
|
while True:
|
||||||
|
c = recv(sock, 1)
|
||||||
|
line.append(c)
|
||||||
|
if c == six.b("\n"):
|
||||||
|
break
|
||||||
|
return six.b("").join(line)
|
||||||
|
|
||||||
|
|
||||||
|
def send(sock, data):
|
||||||
|
if isinstance(data, six.text_type):
|
||||||
|
data = data.encode('utf-8')
|
||||||
|
|
||||||
|
if not sock:
|
||||||
|
raise WebSocketConnectionClosedException("socket is already closed.")
|
||||||
|
|
||||||
|
try:
|
||||||
|
return sock.send(data)
|
||||||
|
except socket.timeout as e:
|
||||||
|
message = extract_err_message(e)
|
||||||
|
raise WebSocketTimeoutException(message)
|
||||||
|
except Exception as e:
|
||||||
|
message = extract_err_message(e)
|
||||||
|
if message and "timed out" in message:
|
||||||
|
raise WebSocketTimeoutException(message)
|
||||||
|
else:
|
||||||
|
raise
|
45
lib/websocket/_ssl_compat.py
Normal file
@@ -0,0 +1,45 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
__all__ = ["HAVE_SSL", "ssl", "SSLError"]
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ssl
|
||||||
|
from ssl import SSLError
|
||||||
|
if hasattr(ssl, 'SSLContext') and hasattr(ssl.SSLContext, 'check_hostname'):
|
||||||
|
HAVE_CONTEXT_CHECK_HOSTNAME = True
|
||||||
|
else:
|
||||||
|
HAVE_CONTEXT_CHECK_HOSTNAME = False
|
||||||
|
if hasattr(ssl, "match_hostname"):
|
||||||
|
from ssl import match_hostname
|
||||||
|
else:
|
||||||
|
from backports.ssl_match_hostname import match_hostname
|
||||||
|
__all__.append("match_hostname")
|
||||||
|
__all__.append("HAVE_CONTEXT_CHECK_HOSTNAME")
|
||||||
|
|
||||||
|
HAVE_SSL = True
|
||||||
|
except ImportError:
|
||||||
|
# dummy class of SSLError for environments without ssl support.
|
||||||
|
class SSLError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
HAVE_SSL = False
|
126
lib/websocket/_url.py
Normal file
@@ -0,0 +1,126 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from six.moves.urllib.parse import urlparse
|
||||||
|
import os
|
||||||
|
|
||||||
|
__all__ = ["parse_url", "get_proxy_info"]
|
||||||
|
|
||||||
|
|
||||||
|
def parse_url(url):
|
||||||
|
"""
|
||||||
|
Parse url and return a tuple of
(hostname, port, resource path, and the flag of secure mode).
|
||||||
|
|
||||||
|
url: url string.
|
||||||
|
"""
|
||||||
|
if ":" not in url:
|
||||||
|
raise ValueError("url is invalid")
|
||||||
|
|
||||||
|
scheme, url = url.split(":", 1)
|
||||||
|
|
||||||
|
parsed = urlparse(url, scheme="ws")
|
||||||
|
if parsed.hostname:
|
||||||
|
hostname = parsed.hostname
|
||||||
|
else:
|
||||||
|
raise ValueError("hostname is invalid")
|
||||||
|
port = 0
|
||||||
|
if parsed.port:
|
||||||
|
port = parsed.port
|
||||||
|
|
||||||
|
is_secure = False
|
||||||
|
if scheme == "ws":
|
||||||
|
if not port:
|
||||||
|
port = 80
|
||||||
|
elif scheme == "wss":
|
||||||
|
is_secure = True
|
||||||
|
if not port:
|
||||||
|
port = 443
|
||||||
|
else:
|
||||||
|
raise ValueError("scheme %s is invalid" % scheme)
|
||||||
|
|
||||||
|
if parsed.path:
|
||||||
|
resource = parsed.path
|
||||||
|
else:
|
||||||
|
resource = "/"
|
||||||
|
|
||||||
|
if parsed.query:
|
||||||
|
resource += "?" + parsed.query
|
||||||
|
|
||||||
|
return (hostname, port, resource, is_secure)
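A sketch of what parse_url() returns for common URL shapes; the expected tuples match the cases exercised in the unit tests further down:

from websocket._url import parse_url

# Default ports come from the scheme; the resource keeps the query string.
print(parse_url("ws://www.example.com/r"))                  # ('www.example.com', 80, '/r', False)
print(parse_url("wss://www.example.com:8080/r?key=value"))  # ('www.example.com', 8080, '/r?key=value', True)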
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_NO_PROXY_HOST = ["localhost", "127.0.0.1"]
|
||||||
|
|
||||||
|
|
||||||
|
def _is_no_proxy_host(hostname, no_proxy):
|
||||||
|
if not no_proxy:
|
||||||
|
v = os.environ.get("no_proxy", "").replace(" ", "")
|
||||||
|
no_proxy = v.split(",")
|
||||||
|
if not no_proxy:
|
||||||
|
no_proxy = DEFAULT_NO_PROXY_HOST
|
||||||
|
|
||||||
|
return hostname in no_proxy
|
||||||
|
|
||||||
|
|
||||||
|
def get_proxy_info(hostname, is_secure,
|
||||||
|
proxy_host=None, proxy_port=0, proxy_auth=None, no_proxy=None):
|
||||||
|
"""
|
||||||
|
try to retrieve proxy host and port from environment
|
||||||
|
if not provided in options.
|
||||||
|
result is (proxy_host, proxy_port, proxy_auth).
|
||||||
|
proxy_auth is tuple of username and password
|
||||||
|
of proxy authentication information.
|
||||||
|
|
||||||
|
hostname: websocket server name.
|
||||||
|
|
||||||
|
is_secure: is the connection secure? (wss)
|
||||||
|
looks for "https_proxy" in env
|
||||||
|
before falling back to "http_proxy"
|
||||||
|
|
||||||
|
options: "http_proxy_host" - http proxy host name.
|
||||||
|
"http_proxy_port" - http proxy port.
|
||||||
|
"http_no_proxy" - host names, which doesn't use proxy.
|
||||||
|
"http_proxy_auth" - http proxy auth infomation.
|
||||||
|
tuple of username and password.
|
||||||
|
defualt is None
|
||||||
|
"""
|
||||||
|
if _is_no_proxy_host(hostname, no_proxy):
|
||||||
|
return None, 0, None
|
||||||
|
|
||||||
|
if proxy_host:
|
||||||
|
port = proxy_port
|
||||||
|
auth = proxy_auth
|
||||||
|
return proxy_host, port, auth
|
||||||
|
|
||||||
|
env_keys = ["http_proxy"]
|
||||||
|
if is_secure:
|
||||||
|
env_keys.insert(0, "https_proxy")
|
||||||
|
|
||||||
|
for key in env_keys:
|
||||||
|
value = os.environ.get(key, None)
|
||||||
|
if value:
|
||||||
|
proxy = urlparse(value)
|
||||||
|
auth = (proxy.username, proxy.password) if proxy.username else None
|
||||||
|
return proxy.hostname, proxy.port, auth
|
||||||
|
|
||||||
|
return None, 0, None
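get_proxy_info() prefers explicit arguments and only then falls back to the https_proxy/http_proxy environment variables, skipping hosts listed in no_proxy; a sketch with placeholder values:

from websocket._url import get_proxy_info

# Explicit proxy settings win over the environment; no_proxy hosts bypass it.
print(get_proxy_info("example.com", False,
                     proxy_host="proxy.local", proxy_port=3128))    # ('proxy.local', 3128, None)
print(get_proxy_info("localhost", False, no_proxy=["localhost"]))   # (None, 0, None)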
|
88
lib/websocket/_utils.py
Normal file
@@ -0,0 +1,88 @@
|
||||||
|
"""
|
||||||
|
websocket - WebSocket client library for Python
|
||||||
|
|
||||||
|
Copyright (C) 2010 Hiroki Ohtani(liris)
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 51 Franklin Street, Fifth Floor,
|
||||||
|
Boston, MA 02110-1335 USA
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
__all__ = ["NoLock", "validate_utf8", "extract_err_message"]
|
||||||
|
|
||||||
|
class NoLock(object):
|
||||||
|
def __enter__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __exit__(self, type, value, traceback):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
# UTF-8 validator
|
||||||
|
# python implementation of http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
|
||||||
|
|
||||||
|
UTF8_ACCEPT = 0
|
||||||
|
UTF8_REJECT = 12
|
||||||
|
|
||||||
|
_UTF8D = [
|
||||||
|
# The first part of the table maps bytes to character classes
# to reduce the size of the transition table and create bitmasks.
|
||||||
|
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
|
||||||
|
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
|
||||||
|
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
|
||||||
|
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
|
||||||
|
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,
|
||||||
|
7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
|
||||||
|
8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
|
||||||
|
10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8,
|
||||||
|
|
||||||
|
# The second part is a transition table that maps a combination
|
||||||
|
# of a state of the automaton and a character class to a state.
|
||||||
|
0,12,24,36,60,96,84,12,12,12,48,72, 12,12,12,12,12,12,12,12,12,12,12,12,
|
||||||
|
12, 0,12,12,12,12,12, 0,12, 0,12,12, 12,24,12,12,12,12,12,24,12,24,12,12,
|
||||||
|
12,12,12,12,12,12,12,24,12,12,12,12, 12,24,12,12,12,12,12,12,12,24,12,12,
|
||||||
|
12,12,12,12,12,12,12,36,12,36,12,12, 12,36,12,12,12,12,12,36,12,36,12,12,
|
||||||
|
12,36,12,12,12,12,12,12,12,12,12,12, ]
|
||||||
|
|
||||||
|
def _decode(state, codep, ch):
|
||||||
|
tp = _UTF8D[ch]
|
||||||
|
|
||||||
|
codep = (ch & 0x3f ) | (codep << 6) if (state != UTF8_ACCEPT) else (0xff >> tp) & (ch)
|
||||||
|
state = _UTF8D[256 + state + tp]
|
||||||
|
|
||||||
|
return state, codep
|
||||||
|
|
||||||
|
def validate_utf8(utfbytes):
|
||||||
|
"""
|
||||||
|
validate utf8 byte string.
|
||||||
|
utfbytes: utf byte string to check.
|
||||||
|
return value: if valid utf8 string, return true. Otherwise, return false.
|
||||||
|
"""
|
||||||
|
state = UTF8_ACCEPT
|
||||||
|
codep = 0
|
||||||
|
for i in utfbytes:
|
||||||
|
if six.PY2:
|
||||||
|
i = ord(i)
|
||||||
|
state, codep = _decode(state, codep, i)
|
||||||
|
if state == UTF8_REJECT:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
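validate_utf8() feeds the byte string through the DFA above and rejects on the first invalid transition; a short sketch:

from websocket._utils import validate_utf8

# Well-formed UTF-8 passes; a stray invalid byte is rejected.
print(validate_utf8(u"こんにちは".encode("utf-8")))  # True
print(validate_utf8(b"\xff\xfe\xfd"))                # False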
|
||||||
|
|
||||||
|
|
||||||
|
def extract_err_message(exception):
|
||||||
|
return getattr(exception, 'strerror', str(exception))
|
4966
lib/websocket/cacert.pem
Normal file
File diff suppressed because it is too large
0
lib/websocket/tests/__init__.py
Normal file
6
lib/websocket/tests/data/header01.txt
Normal file
@@ -0,0 +1,6 @@
|
||||||
|
HTTP/1.1 101 WebSocket Protocol Handshake
|
||||||
|
Connection: Upgrade
|
||||||
|
Upgrade: WebSocket
|
||||||
|
Sec-WebSocket-Accept: Kxep+hNu9n51529fGidYu7a3wO0=
|
||||||
|
some_header: something
|
||||||
|
|
6
lib/websocket/tests/data/header02.txt
Normal file
@@ -0,0 +1,6 @@
|
||||||
|
HTTP/1.1 101 WebSocket Protocol Handshake
|
||||||
|
Connection: Upgrade
|
||||||
|
Upgrade WebSocket
|
||||||
|
Sec-WebSocket-Accept: Kxep+hNu9n51529fGidYu7a3wO0=
|
||||||
|
some_header: something
|
||||||
|
|
660
lib/websocket/tests/test_websocket.py
Normal file
@@ -0,0 +1,660 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
|
||||||
|
import six
|
||||||
|
import sys
|
||||||
|
sys.path[0:0] = [""]
|
||||||
|
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import base64
|
||||||
|
import socket
|
||||||
|
try:
|
||||||
|
from ssl import SSLError
|
||||||
|
except ImportError:
|
||||||
|
# dummy class of SSLError for environments without ssl support.
|
||||||
|
class SSLError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
|
||||||
|
import unittest2 as unittest
|
||||||
|
else:
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
from base64 import decodebytes as base64decode
|
||||||
|
else:
|
||||||
|
from base64 import decodestring as base64decode
|
||||||
|
|
||||||
|
|
||||||
|
# websocket-client
|
||||||
|
import websocket as ws
|
||||||
|
from websocket._handshake import _create_sec_websocket_key
|
||||||
|
from websocket._url import parse_url, get_proxy_info
|
||||||
|
from websocket._utils import validate_utf8
|
||||||
|
from websocket._handshake import _validate as _validate_header
|
||||||
|
from websocket._http import read_headers
|
||||||
|
|
||||||
|
|
||||||
|
# Skip test to access the internet.
|
||||||
|
TEST_WITH_INTERNET = os.environ.get('TEST_WITH_INTERNET', '0') == '1'
|
||||||
|
|
||||||
|
# Skip Secure WebSocket test.
|
||||||
|
TEST_SECURE_WS = True
|
||||||
|
TRACABLE = False
|
||||||
|
|
||||||
|
|
||||||
|
def create_mask_key(n):
|
||||||
|
return "abcd"
|
||||||
|
|
||||||
|
|
||||||
|
class SockMock(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.data = []
|
||||||
|
self.sent = []
|
||||||
|
|
||||||
|
def add_packet(self, data):
|
||||||
|
self.data.append(data)
|
||||||
|
|
||||||
|
def recv(self, bufsize):
|
||||||
|
if self.data:
|
||||||
|
e = self.data.pop(0)
|
||||||
|
if isinstance(e, Exception):
|
||||||
|
raise e
|
||||||
|
if len(e) > bufsize:
|
||||||
|
self.data.insert(0, e[bufsize:])
|
||||||
|
return e[:bufsize]
|
||||||
|
|
||||||
|
def send(self, data):
|
||||||
|
self.sent.append(data)
|
||||||
|
return len(data)
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class HeaderSockMock(SockMock):
|
||||||
|
|
||||||
|
def __init__(self, fname):
|
||||||
|
SockMock.__init__(self)
|
||||||
|
path = os.path.join(os.path.dirname(__file__), fname)
|
||||||
|
with open(path, "rb") as f:
|
||||||
|
self.add_packet(f.read())
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketTest(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
ws.enableTrace(TRACABLE)
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def testDefaultTimeout(self):
|
||||||
|
self.assertEqual(ws.getdefaulttimeout(), None)
|
||||||
|
ws.setdefaulttimeout(10)
|
||||||
|
self.assertEqual(ws.getdefaulttimeout(), 10)
|
||||||
|
ws.setdefaulttimeout(None)
|
||||||
|
|
||||||
|
def testParseUrl(self):
|
||||||
|
p = parse_url("ws://www.example.com/r")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com/r/")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/r/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com/")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com:8080/r")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com:8080/")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://www.example.com:8080")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("wss://www.example.com:8080/r")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], True)
|
||||||
|
|
||||||
|
p = parse_url("wss://www.example.com:8080/r?key=value")
|
||||||
|
self.assertEqual(p[0], "www.example.com")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r?key=value")
|
||||||
|
self.assertEqual(p[3], True)
|
||||||
|
|
||||||
|
self.assertRaises(ValueError, parse_url, "http://www.example.com/r")
|
||||||
|
|
||||||
|
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
|
||||||
|
return
|
||||||
|
|
||||||
|
p = parse_url("ws://[2a03:4000:123:83::3]/r")
|
||||||
|
self.assertEqual(p[0], "2a03:4000:123:83::3")
|
||||||
|
self.assertEqual(p[1], 80)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("ws://[2a03:4000:123:83::3]:8080/r")
|
||||||
|
self.assertEqual(p[0], "2a03:4000:123:83::3")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], False)
|
||||||
|
|
||||||
|
p = parse_url("wss://[2a03:4000:123:83::3]/r")
|
||||||
|
self.assertEqual(p[0], "2a03:4000:123:83::3")
|
||||||
|
self.assertEqual(p[1], 443)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], True)
|
||||||
|
|
||||||
|
p = parse_url("wss://[2a03:4000:123:83::3]:8080/r")
|
||||||
|
self.assertEqual(p[0], "2a03:4000:123:83::3")
|
||||||
|
self.assertEqual(p[1], 8080)
|
||||||
|
self.assertEqual(p[2], "/r")
|
||||||
|
self.assertEqual(p[3], True)
|
||||||
|
|
||||||
|
def testWSKey(self):
|
||||||
|
key = _create_sec_websocket_key()
|
||||||
|
self.assertTrue(key != 24)
|
||||||
|
self.assertTrue(six.u("\n") not in key)
|
||||||
|
|
||||||
|
def testWsUtils(self):
|
||||||
|
key = "c6b8hTg4EeGb2gQMztV1/g=="
|
||||||
|
required_header = {
|
||||||
|
"upgrade": "websocket",
|
||||||
|
"connection": "upgrade",
|
||||||
|
"sec-websocket-accept": "Kxep+hNu9n51529fGidYu7a3wO0=",
|
||||||
|
}
|
||||||
|
        self.assertEqual(_validate_header(required_header, key, None), (True, None))

        header = required_header.copy()
        header["upgrade"] = "http"
        self.assertEqual(_validate_header(header, key, None), (False, None))
        del header["upgrade"]
        self.assertEqual(_validate_header(header, key, None), (False, None))

        header = required_header.copy()
        header["connection"] = "something"
        self.assertEqual(_validate_header(header, key, None), (False, None))
        del header["connection"]
        self.assertEqual(_validate_header(header, key, None), (False, None))

        header = required_header.copy()
        header["sec-websocket-accept"] = "something"
        self.assertEqual(_validate_header(header, key, None), (False, None))
        del header["sec-websocket-accept"]
        self.assertEqual(_validate_header(header, key, None), (False, None))

        header = required_header.copy()
        header["sec-websocket-protocol"] = "sub1"
        self.assertEqual(_validate_header(header, key, ["sub1", "sub2"]), (True, "sub1"))
        self.assertEqual(_validate_header(header, key, ["sub2", "sub3"]), (False, None))

        header = required_header.copy()
        header["sec-websocket-protocol"] = "sUb1"
        self.assertEqual(_validate_header(header, key, ["Sub1", "suB2"]), (True, "sub1"))

    def testReadHeader(self):
        status, header = read_headers(HeaderSockMock("data/header01.txt"))
        self.assertEqual(status, 101)
        self.assertEqual(header["connection"], "upgrade")

        HeaderSockMock("data/header02.txt")
        self.assertRaises(ws.WebSocketException, read_headers, HeaderSockMock("data/header02.txt"))

    def testSend(self):
        # TODO: add longer frame data
        sock = ws.WebSocket()
        sock.set_mask_key(create_mask_key)
        s = sock.sock = HeaderSockMock("data/header01.txt")
        sock.send("Hello")
        self.assertEqual(s.sent[0], six.b("\x81\x85abcd)\x07\x0f\x08\x0e"))

        sock.send("こんにちは")
        self.assertEqual(s.sent[1], six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc"))

        sock.send(u"こんにちは")
        self.assertEqual(s.sent[1], six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc"))

        sock.send("x" * 127)

    def testRecv(self):
        # TODO: add longer frame data
        sock = ws.WebSocket()
        s = sock.sock = SockMock()
        something = six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc")
        s.add_packet(something)
        data = sock.recv()
        self.assertEqual(data, "こんにちは")

        s.add_packet(six.b("\x81\x85abcd)\x07\x0f\x08\x0e"))
        data = sock.recv()
        self.assertEqual(data, "Hello")

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testIter(self):
        count = 2
        for rsvp in ws.create_connection('ws://stream.meetup.com/2/rsvps'):
            count -= 1
            if count == 0:
                break

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testNext(self):
        sock = ws.create_connection('ws://stream.meetup.com/2/rsvps')
        self.assertEqual(str, type(next(sock)))

    def testInternalRecvStrict(self):
        sock = ws.WebSocket()
        s = sock.sock = SockMock()
        s.add_packet(six.b("foo"))
        s.add_packet(socket.timeout())
        s.add_packet(six.b("bar"))
        # s.add_packet(SSLError("The read operation timed out"))
        s.add_packet(six.b("baz"))
        with self.assertRaises(ws.WebSocketTimeoutException):
            data = sock.frame_buffer.recv_strict(9)
        # if six.PY2:
        #     with self.assertRaises(ws.WebSocketTimeoutException):
        #         data = sock._recv_strict(9)
        # else:
        #     with self.assertRaises(SSLError):
        #         data = sock._recv_strict(9)
        data = sock.frame_buffer.recv_strict(9)
        self.assertEqual(data, six.b("foobarbaz"))
        with self.assertRaises(ws.WebSocketConnectionClosedException):
            data = sock.frame_buffer.recv_strict(1)

    def testRecvTimeout(self):
        sock = ws.WebSocket()
        s = sock.sock = SockMock()
        s.add_packet(six.b("\x81"))
        s.add_packet(socket.timeout())
        s.add_packet(six.b("\x8dabcd\x29\x07\x0f\x08\x0e"))
        s.add_packet(socket.timeout())
        s.add_packet(six.b("\x4e\x43\x33\x0e\x10\x0f\x00\x40"))
        with self.assertRaises(ws.WebSocketTimeoutException):
            data = sock.recv()
        with self.assertRaises(ws.WebSocketTimeoutException):
            data = sock.recv()
        data = sock.recv()
        self.assertEqual(data, "Hello, World!")
        with self.assertRaises(ws.WebSocketConnectionClosedException):
            data = sock.recv()

    def testRecvWithSimpleFragmentation(self):
        sock = ws.WebSocket()
        s = sock.sock = SockMock()
        # OPCODE=TEXT, FIN=0, MSG="Brevity is "
        s.add_packet(six.b("\x01\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C"))
        # OPCODE=CONT, FIN=1, MSG="the soul of wit"
        s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17"))
        data = sock.recv()
        self.assertEqual(data, "Brevity is the soul of wit")
        with self.assertRaises(ws.WebSocketConnectionClosedException):
            sock.recv()

    def testRecvWithFireEventOfFragmentation(self):
        sock = ws.WebSocket(fire_cont_frame=True)
        s = sock.sock = SockMock()
        # OPCODE=TEXT, FIN=0, MSG="Brevity is "
        s.add_packet(six.b("\x01\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C"))
        # OPCODE=CONT, FIN=0, MSG="Brevity is "
        s.add_packet(six.b("\x00\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C"))
        # OPCODE=CONT, FIN=1, MSG="the soul of wit"
        s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17"))

        _, data = sock.recv_data()
        self.assertEqual(data, six.b("Brevity is "))
        _, data = sock.recv_data()
        self.assertEqual(data, six.b("Brevity is "))
        _, data = sock.recv_data()
        self.assertEqual(data, six.b("the soul of wit"))

        # OPCODE=CONT, FIN=0, MSG="Brevity is "
        s.add_packet(six.b("\x80\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C"))

        with self.assertRaises(ws.WebSocketException):
            sock.recv_data()

        with self.assertRaises(ws.WebSocketConnectionClosedException):
            sock.recv()

    def testClose(self):
        sock = ws.WebSocket()
        sock.sock = SockMock()
        sock.connected = True
        sock.close()
        self.assertEqual(sock.connected, False)

        sock = ws.WebSocket()
        s = sock.sock = SockMock()
        sock.connected = True
        s.add_packet(six.b('\x88\x80\x17\x98p\x84'))
        sock.recv()
        self.assertEqual(sock.connected, False)

    def testRecvContFragmentation(self):
        sock = ws.WebSocket()
        s = sock.sock = SockMock()
        # OPCODE=CONT, FIN=1, MSG="the soul of wit"
        s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17"))
        self.assertRaises(ws.WebSocketException, sock.recv)

    def testRecvWithProlongedFragmentation(self):
        sock = ws.WebSocket()
        s = sock.sock = SockMock()
        # OPCODE=TEXT, FIN=0, MSG="Once more unto the breach, "
        s.add_packet(six.b("\x01\x9babcd.\x0c\x00\x01A\x0f\x0c\x16\x04B\x16\n\x15"
                           "\rC\x10\t\x07C\x06\x13\x07\x02\x07\tNC"))
        # OPCODE=CONT, FIN=0, MSG="dear friends, "
        s.add_packet(six.b("\x00\x8eabcd\x05\x07\x02\x16A\x04\x11\r\x04\x0c\x07"
                           "\x17MB"))
        # OPCODE=CONT, FIN=1, MSG="once more"
        s.add_packet(six.b("\x80\x89abcd\x0e\x0c\x00\x01A\x0f\x0c\x16\x04"))
        data = sock.recv()
        self.assertEqual(
            data,
            "Once more unto the breach, dear friends, once more")
        with self.assertRaises(ws.WebSocketConnectionClosedException):
            sock.recv()

    def testRecvWithFragmentationAndControlFrame(self):
        sock = ws.WebSocket()
        sock.set_mask_key(create_mask_key)
        s = sock.sock = SockMock()
        # OPCODE=TEXT, FIN=0, MSG="Too much "
        s.add_packet(six.b("\x01\x89abcd5\r\x0cD\x0c\x17\x00\x0cA"))
        # OPCODE=PING, FIN=1, MSG="Please PONG this"
        s.add_packet(six.b("\x89\x90abcd1\x0e\x06\x05\x12\x07C4.,$D\x15\n\n\x17"))
        # OPCODE=CONT, FIN=1, MSG="of a good thing"
        s.add_packet(six.b("\x80\x8fabcd\x0e\x04C\x05A\x05\x0c\x0b\x05B\x17\x0c"
                           "\x08\x0c\x04"))
        data = sock.recv()
        self.assertEqual(data, "Too much of a good thing")
        with self.assertRaises(ws.WebSocketConnectionClosedException):
            sock.recv()
        self.assertEqual(
            s.sent[0],
            six.b("\x8a\x90abcd1\x0e\x06\x05\x12\x07C4.,$D\x15\n\n\x17"))

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testWebSocket(self):
        s = ws.create_connection("ws://echo.websocket.org/")
        self.assertNotEqual(s, None)
        s.send("Hello, World")
        result = s.recv()
        self.assertEqual(result, "Hello, World")

        s.send(u"こにゃにゃちは、世界")
        result = s.recv()
        self.assertEqual(result, "こにゃにゃちは、世界")
        s.close()

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testPingPong(self):
        s = ws.create_connection("ws://echo.websocket.org/")
        self.assertNotEqual(s, None)
        s.ping("Hello")
        s.pong("Hi")
        s.close()

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    @unittest.skipUnless(TEST_SECURE_WS, "wss://echo.websocket.org doesn't work well.")
    def testSecureWebSocket(self):
        if 1:
            import ssl
            s = ws.create_connection("wss://echo.websocket.org/")
            self.assertNotEqual(s, None)
            self.assertTrue(isinstance(s.sock, ssl.SSLSocket))
            s.send("Hello, World")
            result = s.recv()
            self.assertEqual(result, "Hello, World")
            s.send(u"こにゃにゃちは、世界")
            result = s.recv()
            self.assertEqual(result, "こにゃにゃちは、世界")
            s.close()
        #except:
        #    pass

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testWebSocketWihtCustomHeader(self):
        s = ws.create_connection("ws://echo.websocket.org/",
                                 headers={"User-Agent": "PythonWebsocketClient"})
        self.assertNotEqual(s, None)
        s.send("Hello, World")
        result = s.recv()
        self.assertEqual(result, "Hello, World")
        s.close()

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testAfterClose(self):
        s = ws.create_connection("ws://echo.websocket.org/")
        self.assertNotEqual(s, None)
        s.close()
        self.assertRaises(ws.WebSocketConnectionClosedException, s.send, "Hello")
        self.assertRaises(ws.WebSocketConnectionClosedException, s.recv)

    def testUUID4(self):
        """ WebSocket key should be a UUID4.
        """
        key = _create_sec_websocket_key()
        u = uuid.UUID(bytes=base64decode(key.encode("utf-8")))
        self.assertEqual(4, u.version)


class WebSocketAppTest(unittest.TestCase):

    class NotSetYet(object):
        """ A marker class for signalling that a value hasn't been set yet.
        """

    def setUp(self):
        ws.enableTrace(TRACABLE)

        WebSocketAppTest.keep_running_open = WebSocketAppTest.NotSetYet()
        WebSocketAppTest.keep_running_close = WebSocketAppTest.NotSetYet()
        WebSocketAppTest.get_mask_key_id = WebSocketAppTest.NotSetYet()

    def tearDown(self):
        WebSocketAppTest.keep_running_open = WebSocketAppTest.NotSetYet()
        WebSocketAppTest.keep_running_close = WebSocketAppTest.NotSetYet()
        WebSocketAppTest.get_mask_key_id = WebSocketAppTest.NotSetYet()

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testKeepRunning(self):
        """ A WebSocketApp should keep running as long as its self.keep_running
        is not False (in the boolean context).
        """

        def on_open(self, *args, **kwargs):
            """ Set the keep_running flag for later inspection and immediately
            close the connection.
            """
            WebSocketAppTest.keep_running_open = self.keep_running

            self.close()

        def on_close(self, *args, **kwargs):
            """ Set the keep_running flag for the test to use.
            """
            WebSocketAppTest.keep_running_close = self.keep_running

        app = ws.WebSocketApp('ws://echo.websocket.org/', on_open=on_open, on_close=on_close)
        app.run_forever()

        self.assertFalse(isinstance(WebSocketAppTest.keep_running_open,
                                    WebSocketAppTest.NotSetYet))

        self.assertFalse(isinstance(WebSocketAppTest.keep_running_close,
                                    WebSocketAppTest.NotSetYet))

        self.assertEqual(True, WebSocketAppTest.keep_running_open)
        self.assertEqual(False, WebSocketAppTest.keep_running_close)

    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testSockMaskKey(self):
        """ A WebSocketApp should forward the received mask_key function down
        to the actual socket.
        """

        def my_mask_key_func():
            pass

        def on_open(self, *args, **kwargs):
            """ Set the value so the test can use it later on and immediately
            close the connection.
            """
            WebSocketAppTest.get_mask_key_id = id(self.get_mask_key)
            self.close()

        app = ws.WebSocketApp('ws://echo.websocket.org/', on_open=on_open, get_mask_key=my_mask_key_func)
        app.run_forever()

        # Note: We can't use 'is' for comparing the functions directly, need to use 'id'.
        self.assertEqual(WebSocketAppTest.get_mask_key_id, id(my_mask_key_func))


class SockOptTest(unittest.TestCase):
    @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled")
    def testSockOpt(self):
        sockopt = ((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),)
        s = ws.create_connection("ws://echo.websocket.org", sockopt=sockopt)
        self.assertNotEqual(s.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY), 0)
        s.close()


class UtilsTest(unittest.TestCase):
    def testUtf8Validator(self):
        state = validate_utf8(six.b('\xf0\x90\x80\x80'))
        self.assertEqual(state, True)
        state = validate_utf8(six.b('\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5\xed\xa0\x80edited'))
        self.assertEqual(state, False)
        state = validate_utf8(six.b(''))
        self.assertEqual(state, True)


class ProxyInfoTest(unittest.TestCase):
    def setUp(self):
        self.http_proxy = os.environ.get("http_proxy", None)
        self.https_proxy = os.environ.get("https_proxy", None)
        if "http_proxy" in os.environ:
            del os.environ["http_proxy"]
        if "https_proxy" in os.environ:
            del os.environ["https_proxy"]

    def tearDown(self):
        if self.http_proxy:
            os.environ["http_proxy"] = self.http_proxy
        elif "http_proxy" in os.environ:
            del os.environ["http_proxy"]

        if self.https_proxy:
            os.environ["https_proxy"] = self.https_proxy
        elif "https_proxy" in os.environ:
            del os.environ["https_proxy"]

    def testProxyFromArgs(self):
        self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost"), ("localhost", 0, None))
        self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_port=3128), ("localhost", 3128, None))
        self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost"), ("localhost", 0, None))
        self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128), ("localhost", 3128, None))

        self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_auth=("a", "b")),
                         ("localhost", 0, ("a", "b")))
        self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")),
                         ("localhost", 3128, ("a", "b")))
        self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_auth=("a", "b")),
                         ("localhost", 0, ("a", "b")))
        self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")),
                         ("localhost", 3128, ("a", "b")))

        self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, no_proxy=["example.com"], proxy_auth=("a", "b")),
                         ("localhost", 3128, ("a", "b")))
        self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, no_proxy=["echo.websocket.org"], proxy_auth=("a", "b")),
                         (None, 0, None))

    def testProxyFromEnv(self):
        os.environ["http_proxy"] = "http://localhost/"
        self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, None))
        os.environ["http_proxy"] = "http://localhost:3128/"
        self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, None))

        os.environ["http_proxy"] = "http://localhost/"
        os.environ["https_proxy"] = "http://localhost2/"
        self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, None))
        os.environ["http_proxy"] = "http://localhost:3128/"
        os.environ["https_proxy"] = "http://localhost2:3128/"
        self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, None))

        os.environ["http_proxy"] = "http://localhost/"
        os.environ["https_proxy"] = "http://localhost2/"
        self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", None, None))
        os.environ["http_proxy"] = "http://localhost:3128/"
        os.environ["https_proxy"] = "http://localhost2:3128/"
        self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, None))

        os.environ["http_proxy"] = "http://a:b@localhost/"
        self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, ("a", "b")))
        os.environ["http_proxy"] = "http://a:b@localhost:3128/"
        self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, ("a", "b")))

        os.environ["http_proxy"] = "http://a:b@localhost/"
        os.environ["https_proxy"] = "http://a:b@localhost2/"
        self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, ("a", "b")))
        os.environ["http_proxy"] = "http://a:b@localhost:3128/"
        os.environ["https_proxy"] = "http://a:b@localhost2:3128/"
        self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, ("a", "b")))

        os.environ["http_proxy"] = "http://a:b@localhost/"
        os.environ["https_proxy"] = "http://a:b@localhost2/"
        self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", None, ("a", "b")))
        os.environ["http_proxy"] = "http://a:b@localhost:3128/"
        os.environ["https_proxy"] = "http://a:b@localhost2:3128/"
        self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, ("a", "b")))

        os.environ["http_proxy"] = "http://a:b@localhost/"
        os.environ["https_proxy"] = "http://a:b@localhost2/"
        os.environ["no_proxy"] = "example1.com,example2.com"
        self.assertEqual(get_proxy_info("example.1.com", True), ("localhost2", None, ("a", "b")))
        os.environ["http_proxy"] = "http://a:b@localhost:3128/"
        os.environ["https_proxy"] = "http://a:b@localhost2:3128/"
        os.environ["no_proxy"] = "example1.com,example2.com, echo.websocket.org"
        self.assertEqual(get_proxy_info("echo.websocket.org", True), (None, 0, None))


if __name__ == "__main__":
    unittest.main()
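For reference, the masked frames expected by testSend and testRecv above can be reproduced by hand: 0x81 is FIN plus the text opcode, 0x85 is the mask bit plus a 5-byte payload length, "abcd" is the fixed mask the tests install via create_mask_key, and the payload bytes are XOR'd with that mask. A minimal Python 3 sketch (not part of the test suite, shown only to make the byte strings above less opaque):

    def mask_payload(mask, payload):
        # XOR each payload byte with the repeating 4-byte mask, as RFC 6455 masking does.
        return bytes(b ^ mask[i % 4] for i, b in enumerate(payload))

    # Matches the expected frame tail in testSend: b"\x81\x85abcd" + b")\x07\x0f\x08\x0e"
    assert mask_payload(b"abcd", b"Hello") == b")\x07\x0f\x08\x0e"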
@@ -17,16 +17,21 @@ import os
import sys
import subprocess
import threading
import webbrowser
import sqlite3
import cherrypy
import datetime
import uuid

# Some cut down versions of Python may not include this module and it's not critical for us
try:
    import webbrowser
    no_browser = False
except ImportError:
    no_browser = True

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger

from plexpy import versioncheck, logger, monitor, plextv
from plexpy import versioncheck, logger, activity_pinger, plextv
import plexpy.config

PROG_DIR = None
@@ -66,6 +71,7 @@ COMMITS_BEHIND = None
UMASK = None

POLLING_FAILOVER = False

def initialize(config_file):
    with INIT_LOCK:
@@ -75,6 +81,7 @@ def initialize(config_file):
        global CURRENT_VERSION
        global LATEST_VERSION
        global UMASK
        global POLLING_FAILOVER

        CONFIG = plexpy.config.Config(config_file)
@@ -228,6 +235,7 @@ def daemonize():

def launch_browser(host, port, root):
    if not no_browser:
        if host == '0.0.0.0':
            host = 'localhost'
@@ -273,7 +281,11 @@ def initialize_scheduler():
        if CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
            schedule_job(plextv.get_real_pms_url, 'Refresh Plex Server URLs', hours=12, minutes=0, seconds=0)
            schedule_job(monitor.check_active_sessions, 'Check for active sessions', hours=0, minutes=0, seconds=seconds)

            # If we're not using websockets then fall back to polling
            if not CONFIG.MONITORING_USE_WEBSOCKET or POLLING_FAILOVER:
                schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
                             hours=0, minutes=0, seconds=seconds)

        # Refresh the users list
        if CONFIG.REFRESH_USERS_INTERVAL:
@@ -349,7 +361,7 @@ def dbcheck():
        'audio_channels INTEGER, transcode_protocol TEXT, transcode_container TEXT, '
        'transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER,'
        'transcode_width INTEGER, transcode_height INTEGER, buffer_count INTEGER DEFAULT 0, '
        'buffer_last_triggered INTEGER)'
        'buffer_last_triggered INTEGER, last_paused INTEGER)'
    )

    # session_history table :: This is a history table which logs essential stream details
@@ -597,6 +609,15 @@ def dbcheck():
        'ALTER TABLE users ADD COLUMN custom_avatar_url TEXT'
    )

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT last_paused from sessions')
    except sqlite3.OperationalError:
        logger.debug(u"Altering database. Updating database table sessions.")
        c_db.execute(
            'ALTER TABLE sessions ADD COLUMN last_paused INTEGER'
        )

    # Add "Local" user to database as default unauthenticated user.
    result = c_db.execute('SELECT id FROM users WHERE username = "Local"')
    if not result.fetchone():
@@ -633,10 +654,6 @@ def shutdown(restart=False, update=False):
    cherrypy.engine.exit()
    SCHED.shutdown(wait=False)

    # Clear any sessions in the db - Not sure yet if we should do this. More testing required
    # logger.debug(u'Clearing Plex sessions.')
    # monitor.drop_session_db()

    CONFIG.write()

    if not restart and not update:
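The dbcheck() hunk above upgrades older databases with a probe-then-alter pattern: select the new column, and only add it when SQLite reports it missing. A minimal standalone sketch of the same idea (the table and column here are illustrative stand-ins, not PlexPy's full schema):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE sessions (session_key INTEGER)")
    try:
        # Probe for the column added by the newer schema.
        conn.execute("SELECT last_paused FROM sessions")
    except sqlite3.OperationalError:
        # Older schema: the column is missing, so add it in place.
        conn.execute("ALTER TABLE sessions ADD COLUMN last_paused INTEGER")
    conn.close()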
215 plexpy/activity_handler.py Normal file
@@ -0,0 +1,215 @@
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.

import time

import plexpy

from plexpy import logger, pmsconnect, activity_processor, threading, notification_handler


class ActivityHandler(object):

    def __init__(self, timeline):
        self.timeline = timeline
        # print timeline

    def is_valid_session(self):
        if 'sessionKey' in self.timeline:
            if str(self.timeline['sessionKey']).isdigit():
                return True

        return False

    def get_session_key(self):
        if self.is_valid_session():
            return int(self.timeline['sessionKey'])

        return None

    def get_live_session(self):
        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()

        for session in session_list['sessions']:
            if int(session['session_key']) == self.get_session_key():
                return session

        return None

    def update_db_session(self):
        # Update our session temp table values
        monitor_proc = activity_processor.ActivityProcessor()
        monitor_proc.write_session(session=self.get_live_session(), notify=False)

    def on_start(self):
        if self.is_valid_session():
            logger.debug(u"PlexPy ActivityHandler :: Session %s has started." % str(self.get_session_key()))

            # Fire off notifications
            threading.Thread(target=notification_handler.notify,
                             kwargs=dict(stream_data=self.get_live_session(), notify_action='play')).start()

            # Write the new session to our temp session table
            self.update_db_session()

    def on_stop(self, force_stop=False):
        if self.is_valid_session():
            logger.debug(u"PlexPy ActivityHandler :: Session %s has stopped." % str(self.get_session_key()))

            # Set the session last_paused timestamp
            ap = activity_processor.ActivityProcessor()
            ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)

            # Update the session state and viewOffset
            # Set force_stop to true to disable the state set
            if not force_stop:
                ap.set_session_state(session_key=self.get_session_key(),
                                     state=self.timeline['state'],
                                     view_offset=self.timeline['viewOffset'])

            # Retrieve the session data from our temp table
            db_session = ap.get_session_by_key(session_key=self.get_session_key())

            # Fire off notifications
            threading.Thread(target=notification_handler.notify,
                             kwargs=dict(stream_data=db_session, notify_action='stop')).start()

            # Write it to the history table
            monitor_proc = activity_processor.ActivityProcessor()
            monitor_proc.write_session_history(session=db_session)

            # Remove the session from our temp session table
            ap.delete_session(session_key=self.get_session_key())

    def on_pause(self):
        if self.is_valid_session():
            logger.debug(u"PlexPy ActivityHandler :: Session %s has been paused." % str(self.get_session_key()))

            # Set the session last_paused timestamp
            ap = activity_processor.ActivityProcessor()
            ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=int(time.time()))

            # Update the session state and viewOffset
            ap.set_session_state(session_key=self.get_session_key(),
                                 state=self.timeline['state'],
                                 view_offset=self.timeline['viewOffset'])

            # Retrieve the session data from our temp table
            db_session = ap.get_session_by_key(session_key=self.get_session_key())

            # Fire off notifications
            threading.Thread(target=notification_handler.notify,
                             kwargs=dict(stream_data=db_session, notify_action='pause')).start()

    def on_resume(self):
        if self.is_valid_session():
            logger.debug(u"PlexPy ActivityHandler :: Session %s has been resumed." % str(self.get_session_key()))

            # Set the session last_paused timestamp
            ap = activity_processor.ActivityProcessor()
            ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=None)

            # Update the session state and viewOffset
            ap.set_session_state(session_key=self.get_session_key(),
                                 state=self.timeline['state'],
                                 view_offset=self.timeline['viewOffset'])

            # Retrieve the session data from our temp table
            db_session = ap.get_session_by_key(session_key=self.get_session_key())

            # Fire off notifications
            threading.Thread(target=notification_handler.notify,
                             kwargs=dict(stream_data=db_session, notify_action='resume')).start()

    def on_buffer(self):
        if self.is_valid_session():
            logger.debug(u"PlexPy ActivityHandler :: Session %s is buffering." % self.get_session_key())
            ap = activity_processor.ActivityProcessor()
            db_stream = ap.get_session_by_key(session_key=self.get_session_key())

            # Increment our buffer count
            ap.increment_session_buffer_count(session_key=self.get_session_key())

            # Get our current buffer count
            current_buffer_count = ap.get_session_buffer_count(self.get_session_key())
            logger.debug(u"PlexPy ActivityHandler :: Session %s buffer count is %s." %
                         (self.get_session_key(), current_buffer_count))

            # Get our last triggered time
            buffer_last_triggered = ap.get_session_buffer_trigger_time(self.get_session_key())

            time_since_last_trigger = 0
            if buffer_last_triggered:
                logger.debug(u"PlexPy ActivityHandler :: Session %s buffer last triggered at %s." %
                             (self.get_session_key(), buffer_last_triggered))
                time_since_last_trigger = int(time.time()) - int(buffer_last_triggered)

            if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
                    time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
                ap.set_session_buffer_trigger_time(session_key=self.get_session_key())
                threading.Thread(target=notification_handler.notify,
                                 kwargs=dict(stream_data=db_stream, notify_action='buffer')).start()

    # This function receives events from our websocket connection
    def process(self):
        if self.is_valid_session():
            from plexpy import helpers

            ap = activity_processor.ActivityProcessor()
            db_session = ap.get_session_by_key(session_key=self.get_session_key())

            this_state = self.timeline['state']
            this_key = str(self.timeline['ratingKey'])

            # If we already have this session in the temp table, check for state changes
            if db_session:
                last_state = db_session['state']
                last_key = str(db_session['rating_key'])

                # Make sure the same item is being played
                if this_key == last_key:
                    # Update the session state and viewOffset
                    if this_state == 'playing':
                        ap.set_session_state(session_key=self.get_session_key(),
                                             state=this_state,
                                             view_offset=self.timeline['viewOffset'])
                    # Start our state checks
                    if this_state != last_state:
                        if this_state == 'paused':
                            self.on_pause()
                        elif last_state == 'paused' and this_state == 'playing':
                            self.on_resume()
                        elif this_state == 'stopped':
                            self.on_stop()
                        elif this_state == 'buffering':
                            self.on_buffer()
                # If a client doesn't register stop events (I'm looking at you PHT!) check if the ratingKey has changed
                else:
                    # Manually stop and start
                    # Set force_stop so that we don't overwrite our last viewOffset
                    self.on_stop(force_stop=True)
                    self.on_start()

                # Monitor if the stream has reached the watch percentage for notifications
                # The only purpose of this is for notifications
                progress_percent = helpers.get_percent(self.timeline['viewOffset'], db_session['duration'])
                if progress_percent >= plexpy.CONFIG.NOTIFY_WATCHED_PERCENT and this_state != 'buffering':
                    threading.Thread(target=notification_handler.notify,
                                     kwargs=dict(stream_data=db_session, notify_action='watched')).start()

            else:
                # We don't have this session in our table yet, start a new one.
                if this_state != 'buffering':
                    self.on_start()
164 plexpy/activity_pinger.py Normal file
@@ -0,0 +1,164 @@
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.

from plexpy import logger, pmsconnect, notification_handler, database, helpers, activity_processor

import threading
import plexpy
import time

monitor_lock = threading.Lock()


def check_active_sessions(ws_request=False):

    with monitor_lock:
        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()
        monitor_db = database.MonitorDatabase()
        monitor_process = activity_processor.ActivityProcessor()
        # logger.debug(u"PlexPy Monitor :: Checking for active streams.")

        if session_list:
            media_container = session_list['sessions']

            # Check our temp table for what we must do with the new streams
            db_streams = monitor_db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, '
                                           'grandparent_title, user_id, user, friendly_name, ip_address, player, '
                                           'platform, machine_id, parent_rating_key, grandparent_rating_key, state, '
                                           'view_offset, duration, video_decision, audio_decision, width, height, '
                                           'container, video_codec, audio_codec, bitrate, video_resolution, '
                                           'video_framerate, aspect_ratio, audio_channels, transcode_protocol, '
                                           'transcode_container, transcode_video_codec, transcode_audio_codec, '
                                           'transcode_audio_channels, transcode_width, transcode_height, '
                                           'paused_counter, last_paused '
                                           'FROM sessions')
            for stream in db_streams:
                if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key'])
                       for d in media_container):
                    # The user's session is still active
                    for session in media_container:
                        if session['session_key'] == str(stream['session_key']) and \
                                session['rating_key'] == str(stream['rating_key']):
                            # The user is still playing the same media item
                            # Here we can check the play states
                            if session['state'] != stream['state']:
                                if session['state'] == 'paused':
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='pause')).start()

                                if session['state'] == 'playing' and stream['state'] == 'paused':
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='resume')).start()

                            if stream['state'] == 'paused' and not ws_request:
                                # The stream is still paused so we need to increment the paused_counter
                                # Using the set config parameter as the interval, probably not the most accurate but
                                # it will have to do for now. If it's a websocket request don't use this method.
                                paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL
                                monitor_db.action('UPDATE sessions SET paused_counter = ? '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [paused_counter, stream['session_key'], stream['rating_key']])

                            if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
                                # The stream is buffering so we need to increment the buffer_count
                                # We're going just increment on every monitor ping,
                                # would be difficult to keep track otherwise
                                monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
                                                  'WHERE session_key = ? AND rating_key = ?',
                                                  [stream['session_key'], stream['rating_key']])

                                # Check the current buffer count and last buffer to determine if we should notify
                                buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered '
                                                                  'FROM sessions '
                                                                  'WHERE session_key = ? AND rating_key = ?',
                                                                  [stream['session_key'], stream['rating_key']])

                                if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    # Our first buffer notification
                                    if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
                                        logger.info(u"PlexPy Monitor :: User '%s' has triggered a buffer warning."
                                                    % stream['user'])
                                        # Set the buffer trigger time
                                        monitor_db.action('UPDATE sessions '
                                                          'SET buffer_last_triggered = strftime("%s","now") '
                                                          'WHERE session_key = ? AND rating_key = ?',
                                                          [stream['session_key'], stream['rating_key']])

                                        threading.Thread(target=notification_handler.notify,
                                                         kwargs=dict(stream_data=stream, notify_action='buffer')).start()
                                    else:
                                        # Subsequent buffer notifications after wait time
                                        if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
                                                plexpy.CONFIG.BUFFER_WAIT:
                                            logger.info(u"PlexPy Monitor :: User '%s' has triggered multiple buffer warnings."
                                                        % stream['user'])
                                            # Set the buffer trigger time
                                            monitor_db.action('UPDATE sessions '
                                                              'SET buffer_last_triggered = strftime("%s","now") '
                                                              'WHERE session_key = ? AND rating_key = ?',
                                                              [stream['session_key'], stream['rating_key']])

                                            threading.Thread(target=notification_handler.notify,
                                                             kwargs=dict(stream_data=stream, notify_action='buffer')).start()

                                logger.debug(u"PlexPy Monitor :: Stream buffering. Count is now %s. Last triggered %s."
                                             % (buffer_values[0][0], buffer_values[0][1]))

                            # Check if the user has reached the offset in the media we defined as the "watched" percent
                            # Don't trigger if state is buffer as some clients push the progress to the end when
                            # buffering on start.
                            if session['view_offset'] and session['duration'] and session['state'] != 'buffering':
                                if helpers.get_percent(session['view_offset'],
                                                       session['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                                    # Push any notifications -
                                    # Push it on it's own thread so we don't hold up our db actions
                                    threading.Thread(target=notification_handler.notify,
                                                     kwargs=dict(stream_data=stream, notify_action='watched')).start()

                else:
                    # The user has stopped playing a stream
                    logger.debug(u"PlexPy Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                 % (stream['session_key'], stream['rating_key']))
                    monitor_db.action('DELETE FROM sessions WHERE session_key = ? AND rating_key = ?',
                                      [stream['session_key'], stream['rating_key']])

                    # Check if the user has reached the offset in the media we defined as the "watched" percent
                    if stream['view_offset'] and stream['duration']:
                        if helpers.get_percent(stream['view_offset'],
                                               stream['duration']) > plexpy.CONFIG.NOTIFY_WATCHED_PERCENT:
                            # Push any notifications -
                            # Push it on it's own thread so we don't hold up our db actions
                            threading.Thread(target=notification_handler.notify,
                                             kwargs=dict(stream_data=stream, notify_action='watched')).start()

                    # Push any notifications - Push it on it's own thread so we don't hold up our db actions
                    threading.Thread(target=notification_handler.notify,
                                     kwargs=dict(stream_data=stream, notify_action='stop')).start()

                    # Write the item history on playback stop
                    monitor_process.write_session_history(session=stream)

            # Process the newly received session data
            for session in media_container:
                monitor_process.write_session(session)
        else:
            logger.debug(u"PlexPy Monitor :: Unable to read session list.")
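The buffer branch above throttles notifications: the first warning fires on the ping where buffer_count reaches BUFFER_THRESHOLD, and later warnings fire at most once per BUFFER_WAIT seconds after buffer_last_triggered. A distilled standalone sketch of that decision (this helper is an illustration, not PlexPy code):

    import time

    def should_notify_buffer(buffer_count, threshold, last_triggered, wait_seconds, now=None):
        now = int(time.time()) if now is None else now
        if buffer_count < threshold:
            return False
        if buffer_count == threshold:
            # First warning, exactly at the threshold.
            return True
        # Subsequent warnings only after the wait window has elapsed.
        return last_triggered is not None and now > last_triggered + wait_seconds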
364 plexpy/activity_processor.py Normal file
@@ -0,0 +1,364 @@
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.

from plexpy import logger, pmsconnect, notification_handler, log_reader, database

import threading
import plexpy
import re
import time


class ActivityProcessor(object):

    def __init__(self):
        self.db = database.MonitorDatabase()

    def write_session(self, session=None, notify=True):
        if session:
            values = {'session_key': session['session_key'],
                      'rating_key': session['rating_key'],
                      'media_type': session['media_type'],
                      'state': session['state'],
                      'user_id': session['user_id'],
                      'user': session['user'],
                      'machine_id': session['machine_id'],
                      'title': session['title'],
                      'parent_title': session['parent_title'],
                      'grandparent_title': session['grandparent_title'],
                      'friendly_name': session['friendly_name'],
                      'player': session['player'],
                      'platform': session['platform'],
                      'parent_rating_key': session['parent_rating_key'],
                      'grandparent_rating_key': session['grandparent_rating_key'],
                      'view_offset': session['view_offset'],
                      'duration': session['duration'],
                      'video_decision': session['video_decision'],
                      'audio_decision': session['audio_decision'],
                      'width': session['width'],
                      'height': session['height'],
                      'container': session['container'],
                      'video_codec': session['video_codec'],
                      'audio_codec': session['audio_codec'],
                      'bitrate': session['bitrate'],
                      'video_resolution': session['video_resolution'],
                      'video_framerate': session['video_framerate'],
                      'aspect_ratio': session['aspect_ratio'],
                      'audio_channels': session['audio_channels'],
                      'transcode_protocol': session['transcode_protocol'],
                      'transcode_container': session['transcode_container'],
                      'transcode_video_codec': session['transcode_video_codec'],
                      'transcode_audio_codec': session['transcode_audio_codec'],
                      'transcode_audio_channels': session['transcode_audio_channels'],
                      'transcode_width': session['transcode_width'],
                      'transcode_height': session['transcode_height']
                      }

            keys = {'session_key': session['session_key'],
                    'rating_key': session['rating_key']}

            result = self.db.upsert('sessions', values, keys)

            if result == 'insert':
                # Push any notifications - Push it on it's own thread so we don't hold up our db actions
                if notify:
                    threading.Thread(target=notification_handler.notify,
                                     kwargs=dict(stream_data=values, notify_action='play')).start()

                started = int(time.time())

                # Try and grab IP address from logs
                if plexpy.CONFIG.IP_LOGGING_ENABLE and plexpy.CONFIG.PMS_LOGS_FOLDER:
                    ip_address = self.find_session_ip(rating_key=session['rating_key'],
                                                      machine_id=session['machine_id'])
                else:
                    ip_address = None

                timestamp = {'started': started,
                             'ip_address': ip_address}

                # If it's our first write then time stamp it.
                self.db.upsert('sessions', timestamp, keys)

    def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
        from plexpy import users

        user_data = users.Users()
        user_details = user_data.get_user_friendly_name(user=session['user'])

        if session:
            logging_enabled = False

            if is_import:
                if str(session['stopped']).isdigit():
                    stopped = int(session['stopped'])
                else:
                    stopped = int(time.time())
            else:
                stopped = int(time.time())

            if plexpy.CONFIG.VIDEO_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                    (session['media_type'] == 'movie' or session['media_type'] == 'episode'):
                logging_enabled = True
            elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and str(session['rating_key']).isdigit() and \
                    session['media_type'] == 'track':
                logging_enabled = True
            else:
                logger.debug(u"PlexPy ActivityProcessor :: ratingKey %s not logged. Does not meet logging criteria. "
                             u"Media type is '%s'" % (session['rating_key'], session['media_type']))

            if str(session['paused_counter']).isdigit():
                real_play_time = stopped - session['started'] - int(session['paused_counter'])
            else:
                real_play_time = stopped - session['started']

            if plexpy.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import:
                if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                        (real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
                    logging_enabled = False
                    logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                                 u"seconds, so we're not logging it." %
                                 (session['rating_key'], str(real_play_time), plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
            elif is_import and import_ignore_interval:
                if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
                        (real_play_time < int(import_ignore_interval)):
                    logging_enabled = False
                    logger.debug(u"PlexPy ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
                                 u"seconds, so we're not logging it." %
                                 (session['rating_key'], str(real_play_time),
                                  import_ignore_interval))

            if not user_details['keep_history'] and not is_import:
                logging_enabled = False
                logger.debug(u"PlexPy ActivityProcessor :: History logging for user '%s' is disabled." % session['user'])

            if logging_enabled:
                # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history table...")
                query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
                        'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
                        'platform, machine_id, view_offset) VALUES ' \
                        '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['started'], stopped, session['rating_key'], session['parent_rating_key'],
                        session['grandparent_rating_key'], session['media_type'], session['user_id'], session['user'],
                        session['ip_address'], session['paused_counter'], session['player'], session['platform'],
                        session['machine_id'], session['view_offset']]

                # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history transaction...")
                self.db.action(query=query, args=args)

                # logger.debug(u"PlexPy ActivityProcessor :: Successfully written history item, last id for session_history is %s"
                # % last_id)

                # Write the session_history_media_info table
                # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_media_info table...")
                query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
                        'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
                        'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
                        'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
                        'transcode_height) VALUES ' \
                        '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['rating_key'], session['video_decision'], session['audio_decision'],
                        session['duration'], session['width'], session['height'], session['container'],
                        session['video_codec'], session['audio_codec'], session['bitrate'],
                        session['video_resolution'], session['video_framerate'], session['aspect_ratio'],
                        session['audio_channels'], session['transcode_protocol'], session['transcode_container'],
                        session['transcode_video_codec'], session['transcode_audio_codec'],
                        session['transcode_audio_channels'], session['transcode_width'], session['transcode_height']]

                # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_media_info transaction...")
                self.db.action(query=query, args=args)

                if not is_import:
                    logger.debug(u"PlexPy ActivityProcessor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
                    pms_connect = pmsconnect.PmsConnect()
                    result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
                    metadata = result['metadata']
                else:
                    metadata = import_metadata

                # Write the session_history_metadata table
                directors = ";".join(metadata['directors'])
                writers = ";".join(metadata['writers'])
                actors = ";".join(metadata['actors'])
                genres = ";".join(metadata['genres'])

                # Build media item title
                if session['media_type'] == 'episode' or session['media_type'] == 'track':
                    full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
                elif session['media_type'] == 'movie':
                    full_title = metadata['title']
                else:
                    full_title = metadata['title']

                # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_metadata table...")
                query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
                        'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
                        'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \
                        'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \
                        'tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \
                        '(last_insert_rowid(), ' \
                        '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'

                args = [session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'],
                        session['title'], session['parent_title'], session['grandparent_title'], full_title,
                        metadata['index'], metadata['parent_index'], metadata['thumb'], metadata['parent_thumb'],
                        metadata['grandparent_thumb'], metadata['art'], session['media_type'], metadata['year'],
                        metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'],
                        metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['tagline'],
                        metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, metadata['studio']]

                # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_metadata transaction...")
                self.db.action(query=query, args=args)

    def find_session_ip(self, rating_key=None, machine_id=None):

        logger.debug(u"PlexPy ActivityProcessor :: Requesting log lines...")
        log_lines = log_reader.get_log_tail(window=5000, parsed=False)

        rating_key_line = 'ratingKey=' + rating_key
        rating_key_line_2 = 'metadata%2F' + rating_key
        machine_id_line = 'session=' + machine_id

        for line in reversed(log_lines):
            # We're good if we find a line with both machine id and rating key
            # This is usually when there is a transcode session
            if machine_id_line in line and (rating_key_line in line or rating_key_line_2 in line):
                # Currently only checking for ipv4 addresses
                ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', line)
                if ipv4:
                    # The logged IP will always be the first match and we don't want localhost entries
                    if ipv4[0] != '127.0.0.1':
                        logger.debug(u"PlexPy ActivityProcessor :: Matched IP address (%s) for stream ratingKey %s "
                                     u"and machineIdentifier %s."
                                     % (ipv4[0], rating_key, machine_id))
                        return ipv4[0]

        logger.debug(u"PlexPy ActivityProcessor :: Unable to find IP address on first pass. "
                     u"Attempting fallback check in 5 seconds...")

        # Wait for the log to catch up and read in new lines
        time.sleep(5)

        logger.debug(u"PlexPy ActivityProcessor :: Requesting log lines...")
        log_lines = log_reader.get_log_tail(window=5000, parsed=False)

        for line in reversed(log_lines):
            if 'GET /:/timeline' in line and (rating_key_line in line or rating_key_line_2 in line):
                # Currently only checking for ipv4 addresses
                # This method can return the wrong IP address if more than one user
                # starts watching the same media item around the same time.
                ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', line)
                if ipv4:
                    # The logged IP will always be the first match and we don't want localhost entries
                    if ipv4[0] != '127.0.0.1':
                        logger.debug(u"PlexPy ActivityProcessor :: Matched IP address (%s) for stream ratingKey %s." %
                                     (ipv4[0], rating_key))
                        return ipv4[0]

        logger.debug(u"PlexPy ActivityProcessor :: Unable to find IP address on fallback search. Not logging IP address.")

        return None

    def get_session_by_key(self, session_key=None):
        if str(session_key).isdigit():
            result = self.db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, '
                                    'grandparent_title, user_id, user, friendly_name, ip_address, player, '
                                    'platform, machine_id, parent_rating_key, grandparent_rating_key, state, '
                                    'view_offset, duration, video_decision, audio_decision, width, height, '
                                    'container, video_codec, audio_codec, bitrate, video_resolution, '
                                    'video_framerate, aspect_ratio, audio_channels, transcode_protocol, '
                                    'transcode_container, transcode_video_codec, transcode_audio_codec, '
                                    'transcode_audio_channels, transcode_width, transcode_height, '
                                    'paused_counter, last_paused '
                                    'FROM sessions WHERE session_key = ? LIMIT 1', args=[session_key])
            for session in result:
                if session:
                    return session

        return None

    def set_session_state(self, session_key=None, state=None, view_offset=0):
        if str(session_key).isdigit() and str(view_offset).isdigit():
            values = {'view_offset': int(view_offset)}
            if state:
                values['state'] = state

            keys = {'session_key': session_key}
            result = self.db.upsert('sessions', values, keys)

            return result

        return None

    def delete_session(self, session_key=None):
        if str(session_key).isdigit():
            self.db.action('DELETE FROM sessions WHERE session_key = ?', [session_key])

    def set_session_last_paused(self, session_key=None, timestamp=None):
        if str(session_key).isdigit():
            result = self.db.select('SELECT last_paused, paused_counter '
                                    'FROM sessions '
                                    'WHERE session_key = ?', args=[session_key])
|
paused_counter = None
|
||||||
|
for session in result:
|
||||||
|
if session['last_paused']:
|
||||||
|
paused_offset = int(time.time()) - int(session['last_paused'])
|
||||||
|
paused_counter = int(session['paused_counter']) + int(paused_offset)
|
||||||
|
|
||||||
|
values = {'state': 'playing',
|
||||||
|
'last_paused': timestamp
|
||||||
|
}
|
||||||
|
if paused_counter:
|
||||||
|
values['paused_counter'] = paused_counter
|
||||||
|
|
||||||
|
keys = {'session_key': session_key}
|
||||||
|
self.db.upsert('sessions', values, keys)
|
||||||
|
|
||||||
|
def increment_session_buffer_count(self, session_key=None):
|
||||||
|
if str(session_key).isdigit():
|
||||||
|
self.db.action('UPDATE sessions SET buffer_count = buffer_count + 1 '
|
||||||
|
'WHERE session_key = ?',
|
||||||
|
[session_key])
|
||||||
|
|
||||||
|
def get_session_buffer_count(self, session_key=None):
|
||||||
|
if str(session_key).isdigit():
|
||||||
|
buffer_count = self.db.select_single('SELECT buffer_count '
|
||||||
|
'FROM sessions '
|
||||||
|
'WHERE session_key = ?',
|
||||||
|
[session_key])
|
||||||
|
if buffer_count:
|
||||||
|
return buffer_count
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def set_session_buffer_trigger_time(self, session_key=None):
|
||||||
|
if str(session_key).isdigit():
|
||||||
|
self.db.action('UPDATE sessions SET buffer_last_triggered = strftime("%s","now") '
|
||||||
|
'WHERE session_key = ?',
|
||||||
|
[session_key])
|
||||||
|
|
||||||
|
def get_session_buffer_trigger_time(self, session_key=None):
|
||||||
|
if str(session_key).isdigit():
|
||||||
|
last_time = self.db.select_single('SELECT buffer_last_triggered '
|
||||||
|
'FROM sessions '
|
||||||
|
'WHERE session_key = ?',
|
||||||
|
[session_key])
|
||||||
|
if last_time:
|
||||||
|
return last_time
|
||||||
|
|
||||||
|
return None
|
|
@@ -83,9 +83,11 @@ _CONFIG_DEFINITIONS = {
     'GROWL_ON_RESUME': (int, 'Growl', 0),
     'GROWL_ON_BUFFER': (int, 'Growl', 0),
     'GROWL_ON_WATCHED': (int, 'Growl', 0),
+    'HOME_LIBRARY_CARDS': (str, 'General', 'library_statistics_first'),
     'HOME_STATS_LENGTH': (int, 'General', 30),
     'HOME_STATS_TYPE': (int, 'General', 0),
     'HOME_STATS_COUNT': (int, 'General', 5),
+    'HOME_STATS_CARDS': (str, 'General', 'watch_statistics, top_tv, popular_tv, top_movies, popular_movies, top_music, popular_music, top_users, top_platforms, last_watched'),
     'HTTPS_CERT': (str, 'General', ''),
     'HTTPS_KEY': (str, 'General', ''),
     'HTTP_HOST': (str, 'General', '0.0.0.0'),
@@ -110,6 +112,7 @@ _CONFIG_DEFINITIONS = {
     'MUSIC_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
     'MUSIC_LOGGING_ENABLE': (int, 'Monitoring', 0),
     'MONITORING_INTERVAL': (int, 'Monitoring', 60),
+    'MONITORING_USE_WEBSOCKET': (int, 'Monitoring', 0),
     'NMA_APIKEY': (str, 'NMA', ''),
     'NMA_ENABLED': (int, 'NMA', 0),
     'NMA_PRIORITY': (int, 'NMA', 0),
@@ -195,8 +198,14 @@ _CONFIG_DEFINITIONS = {
     'TV_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),
     'TWITTER_ENABLED': (int, 'Twitter', 0),
     'TWITTER_PASSWORD': (str, 'Twitter', ''),
-    'TWITTER_PREFIX': (str, 'Twitter', 'Headphones'),
+    'TWITTER_PREFIX': (str, 'Twitter', 'PlexPy'),
     'TWITTER_USERNAME': (str, 'Twitter', ''),
+    'TWITTER_ON_PLAY': (int, 'Twitter', 0),
+    'TWITTER_ON_STOP': (int, 'Twitter', 0),
+    'TWITTER_ON_PAUSE': (int, 'Twitter', 0),
+    'TWITTER_ON_RESUME': (int, 'Twitter', 0),
+    'TWITTER_ON_BUFFER': (int, 'Twitter', 0),
+    'TWITTER_ON_WATCHED': (int, 'Twitter', 0),
     'UPDATE_DB_INTERVAL': (int, 'General', 24),
     'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1),
     'VIDEO_LOGGING_ENABLE': (int, 'Monitoring', 1),
@@ -60,6 +60,8 @@ class DataFactory(object):
                    (CASE WHEN session_history_metadata.duration IS NULL THEN 1.0 ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete',
                   'session_history_media_info.video_decision',
                   'COUNT(*) AS group_count'
+                  'session_history_media_info.audio_decision',
+                  'session_history.user_id as user_id'
                   ]
         try:
             query = data_tables.ssp_query(table_name='session_history',
@@ -127,6 +129,8 @@ class DataFactory(object):
                    "video_decision": item["video_decision"],
                    "watched_status": watched_status,
                    "group_count": item["group_count"]
+                   "audio_decision": item["audio_decision"],
+                   "user_id": item["user_id"]
                    }

             rows.append(row)
@@ -139,22 +143,14 @@ class DataFactory(object):

         return dict

-    def get_home_stats(self, time_range='30', stat_type='0', stat_count='5'):
+    def get_home_stats(self, time_range='30', stats_type=0, stats_count='5', stats_cards='', notify_watched_percent='85'):
         monitor_db = database.MonitorDatabase()

-        if not time_range.isdigit():
-            time_range = '30'
-
-        sort_type = 'total_plays' if stat_type == '0' else 'total_duration'
-
-        if not time_range.isdigit():
-            stat_count = '5'
-
-        # This actually determines the output order in the home page
-        stats_queries = ["top_tv", "popular_tv", "top_movies", "popular_movies", "top_users", "top_platforms", "last_watched"]
+        sort_type = 'total_plays' if stats_type == 0 else 'total_duration'
+
         home_stats = []

-        for stat in stats_queries:
+        for stat in stats_cards:
             if 'top_tv' in stat:
                 top_tv = []
                 try:
@@ -174,7 +170,7 @@ class DataFactory(object):
                             '>= datetime("now", "-%s days", "localtime") ' \
                             'AND session_history_metadata.media_type = "episode" ' \
                             'GROUP BY session_history_metadata.grandparent_title ' \
-                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stat_count)
+                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stats_count)
                     result = monitor_db.select(query)
                 except:
                     logger.warn("Unable to execute database query.")
@@ -210,6 +206,10 @@ class DataFactory(object):
                             'session_history_metadata.grandparent_rating_key, ' \
                             'MAX(session_history.started) as last_watch, ' \
                             'COUNT(session_history.id) as total_plays, ' \
+                            'SUM(case when session_history.stopped > 0 ' \
+                            'then (session_history.stopped - session_history.started) ' \
+                            ' - (case when session_history.paused_counter is NULL then 0 else session_history.paused_counter end) ' \
+                            'else 0 end) as total_duration, ' \
                             'session_history_metadata.grandparent_thumb ' \
                             'FROM session_history_metadata ' \
                             'JOIN session_history ON session_history_metadata.id = session_history.id ' \
@@ -217,8 +217,8 @@ class DataFactory(object):
                             '>= datetime("now", "-%s days", "localtime") ' \
                             'AND session_history_metadata.media_type = "episode" ' \
                             'GROUP BY session_history_metadata.grandparent_title ' \
-                            'ORDER BY users_watched DESC, total_plays DESC ' \
-                            'LIMIT %s' % (time_range, stat_count)
+                            'ORDER BY users_watched DESC, %s DESC ' \
+                            'LIMIT %s' % (time_range, sort_type, stats_count)
                     result = monitor_db.select(query)
                 except:
                     logger.warn("Unable to execute database query.")
@@ -230,7 +230,7 @@ class DataFactory(object):
                            'rating_key': item[3],
                            'last_play': item[4],
                            'total_plays': item[5],
-                           'grandparent_thumb': item[6],
+                           'grandparent_thumb': item[7],
                            'thumb': '',
                            'user': '',
                            'friendly_name': '',
@@ -262,7 +262,7 @@ class DataFactory(object):
                             '>= datetime("now", "-%s days", "localtime") ' \
                             'AND session_history_metadata.media_type = "movie" ' \
                             'GROUP BY session_history_metadata.full_title ' \
-                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stat_count)
+                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stats_count)
                     result = monitor_db.select(query)
                 except:
                     logger.warn("Unable to execute database query.")
@@ -298,6 +298,10 @@ class DataFactory(object):
                             'session_history_metadata.rating_key, ' \
                             'MAX(session_history.started) as last_watch, ' \
                             'COUNT(session_history.id) as total_plays, ' \
+                            'SUM(case when session_history.stopped > 0 ' \
+                            'then (session_history.stopped - session_history.started) ' \
+                            ' - (case when session_history.paused_counter is NULL then 0 else session_history.paused_counter end) ' \
+                            'else 0 end) as total_duration, ' \
                             'session_history_metadata.thumb ' \
                             'FROM session_history_metadata ' \
                             'JOIN session_history ON session_history_metadata.id = session_history.id ' \
@@ -305,8 +309,8 @@ class DataFactory(object):
                             '>= datetime("now", "-%s days", "localtime") ' \
                             'AND session_history_metadata.media_type = "movie" ' \
                             'GROUP BY session_history_metadata.full_title ' \
-                            'ORDER BY users_watched DESC, total_plays DESC ' \
-                            'LIMIT %s' % (time_range, stat_count)
+                            'ORDER BY users_watched DESC, %s DESC ' \
+                            'LIMIT %s' % (time_range, sort_type, stats_count)
                     result = monitor_db.select(query)
                 except:
                     logger.warn("Unable to execute database query.")
@@ -319,7 +323,7 @@ class DataFactory(object):
                            'last_play': item[4],
                            'total_plays': item[5],
                            'grandparent_thumb': '',
-                           'thumb': item[6],
+                           'thumb': item[7],
                            'user': '',
                            'friendly_name': '',
                            'platform_type': '',
@@ -331,6 +335,98 @@ class DataFactory(object):
                 home_stats.append({'stat_id': stat,
                                    'rows': popular_movies})

+            elif 'top_music' in stat:
+                top_music = []
+                try:
+                    query = 'SELECT session_history_metadata.id, ' \
+                            'session_history_metadata.grandparent_title, ' \
+                            'COUNT(session_history_metadata.grandparent_title) as total_plays, ' \
+                            'SUM(case when session_history.stopped > 0 ' \
+                            'then (session_history.stopped - session_history.started) ' \
+                            ' - (case when session_history.paused_counter is NULL then 0 else session_history.paused_counter end) ' \
+                            'else 0 end) as total_duration, ' \
+                            'session_history_metadata.grandparent_rating_key, ' \
+                            'MAX(session_history.started) as last_watch,' \
+                            'session_history_metadata.grandparent_thumb ' \
+                            'FROM session_history_metadata ' \
+                            'JOIN session_history on session_history_metadata.id = session_history.id ' \
+                            'WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
+                            '>= datetime("now", "-%s days", "localtime") ' \
+                            'AND session_history_metadata.media_type = "track" ' \
+                            'GROUP BY session_history_metadata.grandparent_title ' \
+                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stats_count)
+                    result = monitor_db.select(query)
+                except:
+                    logger.warn("Unable to execute database query.")
+                    return None
+
+                for item in result:
+                    row = {'title': item[1],
+                           'total_plays': item[2],
+                           'total_duration': item[3],
+                           'users_watched': '',
+                           'rating_key': item[4],
+                           'last_play': item[5],
+                           'grandparent_thumb': item[6],
+                           'thumb': '',
+                           'user': '',
+                           'friendly_name': '',
+                           'platform_type': '',
+                           'platform': '',
+                           'row_id': item[0]
+                           }
+                    top_music.append(row)
+
+                home_stats.append({'stat_id': stat,
+                                   'stat_type': sort_type,
+                                   'rows': top_music})
+
+            elif 'popular_music' in stat:
+                popular_music = []
+                try:
+                    query = 'SELECT session_history_metadata.id, ' \
+                            'session_history_metadata.grandparent_title, ' \
+                            'COUNT(DISTINCT session_history.user_id) as users_watched, ' \
+                            'session_history_metadata.grandparent_rating_key, ' \
+                            'MAX(session_history.started) as last_watch, ' \
+                            'COUNT(session_history.id) as total_plays, ' \
+                            'SUM(case when session_history.stopped > 0 ' \
+                            'then (session_history.stopped - session_history.started) ' \
+                            ' - (case when session_history.paused_counter is NULL then 0 else session_history.paused_counter end) ' \
+                            'else 0 end) as total_duration, ' \
+                            'session_history_metadata.grandparent_thumb ' \
+                            'FROM session_history_metadata ' \
+                            'JOIN session_history ON session_history_metadata.id = session_history.id ' \
+                            'WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
+                            '>= datetime("now", "-%s days", "localtime") ' \
+                            'AND session_history_metadata.media_type = "track" ' \
+                            'GROUP BY session_history_metadata.grandparent_title ' \
+                            'ORDER BY users_watched DESC, %s DESC ' \
+                            'LIMIT %s' % (time_range, sort_type, stats_count)
+                    result = monitor_db.select(query)
+                except:
+                    logger.warn("Unable to execute database query.")
+                    return None
+
+                for item in result:
+                    row = {'title': item[1],
+                           'users_watched': item[2],
+                           'rating_key': item[3],
+                           'last_play': item[4],
+                           'total_plays': item[5],
+                           'grandparent_thumb': item[7],
+                           'thumb': '',
+                           'user': '',
+                           'friendly_name': '',
+                           'platform_type': '',
+                           'platform': '',
+                           'row_id': item[0]
+                           }
+                    popular_music.append(row)
+
+                home_stats.append({'stat_id': stat,
+                                   'rows': popular_music})
+
             elif 'top_users' in stat:
                 top_users = []
                 try:
@@ -351,7 +447,7 @@ class DataFactory(object):
                             'WHERE datetime(session_history.stopped, "unixepoch", "localtime") >= ' \
                             'datetime("now", "-%s days", "localtime") '\
                             'GROUP BY session_history.user_id ' \
-                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stat_count)
+                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stats_count)
                     result = monitor_db.select(query)
                 except:
                     logger.warn("Unable to execute database query.")
@@ -399,7 +495,7 @@ class DataFactory(object):
                             'WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
                             '>= datetime("now", "-%s days", "localtime") ' \
                             'GROUP BY session_history.platform ' \
-                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stat_count)
+                            'ORDER BY %s DESC LIMIT %s' % (time_range, sort_type, stats_count)
                     result = monitor_db.select(query)
                 except:
                     logger.warn("Unable to execute database query.")
@@ -440,7 +536,11 @@ class DataFactory(object):
                             'session_history_metadata.thumb, ' \
                             'session_history_metadata.grandparent_thumb, ' \
                             'MAX(session_history.started) as last_watch, ' \
-                            'session_history.player as platform ' \
+                            'session_history.player as platform, ' \
+                            '((CASE WHEN session_history.view_offset IS NULL THEN 0.1 ELSE \
+                               session_history.view_offset * 1.0 END) / \
+                               (CASE WHEN session_history_metadata.duration IS NULL THEN 1.0 ELSE \
+                               session_history_metadata.duration * 1.0 END) * 100) as percent_complete ' \
                             'FROM session_history_metadata ' \
                             'JOIN session_history ON session_history_metadata.id = session_history.id ' \
                             'LEFT OUTER JOIN users ON session_history.user_id = users.user_id ' \
@@ -448,9 +548,10 @@ class DataFactory(object):
                             '>= datetime("now", "-%s days", "localtime") ' \
                             'AND (session_history_metadata.media_type = "movie" ' \
                             'OR session_history_metadata.media_type = "episode") ' \
+                            'AND percent_complete >= %s ' \
                             'GROUP BY session_history_metadata.full_title ' \
                             'ORDER BY last_watch DESC ' \
-                            'LIMIT %s' % (time_range, stat_count)
+                            'LIMIT %s' % (time_range, notify_watched_percent, stats_count)
                     result = monitor_db.select(query)
                 except:
                     logger.warn("Unable to execute database query.")
@@ -81,10 +81,11 @@ class HTTPHandler(object):
             logger.warn(u"Failed to access uri endpoint %s with error %s" % (uri, e))
             return None
         except Exception, e:
-            logger.warn(u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only?" % uri)
+            logger.warn(u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (uri, e))
             return None
         except:
             logger.warn(u"Failed to access uri endpoint %s with Uncaught exception." % uri)
+            return None

         if request_status == 200:
             if output_format == 'dict':
@@ -49,7 +49,8 @@ AGENT_IDS = {"Growl": 0,
              "Pushover": 7,
              "OSX Notify": 8,
              "Boxcar2": 9,
-             "Email": 10}
+             "Email": 10,
+             "Twitter": 11}

 def available_notification_agents():
     agents = [{'name': 'Growl',
@@ -171,6 +172,18 @@ def available_notification_agents():
                'on_resume': plexpy.CONFIG.EMAIL_ON_RESUME,
                'on_buffer': plexpy.CONFIG.EMAIL_ON_BUFFER,
                'on_watched': plexpy.CONFIG.EMAIL_ON_WATCHED
+               },
+              {'name': 'Twitter',
+               'id': AGENT_IDS['Twitter'],
+               'config_prefix': 'twitter',
+               'has_config': True,
+               'state': checked(plexpy.CONFIG.TWITTER_ENABLED),
+               'on_play': plexpy.CONFIG.TWITTER_ON_PLAY,
+               'on_stop': plexpy.CONFIG.TWITTER_ON_STOP,
+               'on_pause': plexpy.CONFIG.TWITTER_ON_PAUSE,
+               'on_resume': plexpy.CONFIG.TWITTER_ON_RESUME,
+               'on_buffer': plexpy.CONFIG.TWITTER_ON_BUFFER,
+               'on_watched': plexpy.CONFIG.TWITTER_ON_WATCHED
                }
               ]

@@ -229,6 +242,9 @@ def get_notification_agent_config(config_id):
         elif config_id == 10:
             email = Email()
             return email.return_config_options()
+        elif config_id == 11:
+            tweet = TwitterNotifier()
+            return tweet.return_config_options()
         else:
             return []
     else:
@@ -271,6 +287,9 @@ def send_notification(config_id, subject, body):
         elif config_id == 10:
             email = Email()
             email.notify(subject=subject, message=body)
+        elif config_id == 11:
+            tweet = TwitterNotifier()
+            tweet.notify(subject=subject, message=body)
         else:
             logger.debug(u"PlexPy Notifier :: Unknown agent id received.")
     else:
@@ -912,19 +931,17 @@ class TwitterNotifier(object):
     SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'

     def __init__(self):
-        self.consumer_key = "oYKnp2ddX5gbARjqX8ZAAg"
-        self.consumer_secret = "A4Xkw9i5SjHbTk7XT8zzOPqivhj9MmRDR9Qn95YA9sk"
+        self.consumer_key = "2LdJKXHDUwJtjYBsdwJisIOsh"
+        self.consumer_secret = "QWbUcZzAIiL4zbDCIhy2EdUkV8yEEav3qMdo5y3FugxCFelWrA"

-    def notify_snatch(self, title):
-        if plexpy.CONFIG.TWITTER_ONSNATCH:
-            self._notifyTwitter(common.notifyStrings[common.NOTIFY_SNATCH] + ': ' + title + ' at ' + helpers.now())
-
-    def notify_download(self, title):
-        if plexpy.CONFIG.TWITTER_ENABLED:
-            self._notifyTwitter(common.notifyStrings[common.NOTIFY_DOWNLOAD] + ': ' + title + ' at ' + helpers.now())
+    def notify(self, subject, message):
+        if not subject or not message:
+            return
+        else:
+            self._send_tweet(subject + ': ' + message)

     def test_notify(self):
-        return self._notifyTwitter("This is a test notification from PlexPy at " + helpers.now(), force=True)
+        return self._send_tweet("This is a test notification from PlexPy at " + helpers.now())

     def _get_authorization(self):
@@ -958,7 +975,7 @@ class TwitterNotifier(object):
         logger.info('Generating and signing request for an access token using key ' + key)

         oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
-        logger.info('oauth_consumer: ' + str(oauth_consumer))
+        # logger.debug('oauth_consumer: ' + str(oauth_consumer))
         oauth_client = oauth.Client(oauth_consumer, token)
         logger.info('oauth_client: ' + str(oauth_client))
         resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
@@ -979,7 +996,6 @@ class TwitterNotifier(object):
         return True

     def _send_tweet(self, message=None):
-
         username = self.consumer_key
         password = self.consumer_secret
         access_token_key = plexpy.CONFIG.TWITTER_USERNAME
@@ -997,13 +1013,36 @@ class TwitterNotifier(object):

         return True

-    def _notifyTwitter(self, message='', force=False):
-        prefix = plexpy.CONFIG.TWITTER_PREFIX
-
-        if not plexpy.CONFIG.TWITTER_ENABLED and not force:
-            return False
-
-        return self._send_tweet(prefix + ": " + message)
+    def return_config_options(self):
+        config_option = [{'label': 'Request Authorisation',
+                          'value': 'Request Authorisation',
+                          'name': 'twitterStep1',
+                          'description': 'Step 1: Click Request button above. (Ensure you allow the browser pop-up).',
+                          'input_type': 'button'
+                          },
+                         {'label': 'Authorisation Key',
+                          'value': '',
+                          'name': 'twitter_key',
+                          'description': 'Step 2: Input the authorisation key you received from Step 1.',
+                          'input_type': 'text'
+                          },
+                         {'label': 'Verify Key',
+                          'value': 'Verify Key',
+                          'name': 'twitterStep2',
+                          'description': 'Step 3: Verify the key.',
+                          'input_type': 'button'
+                          },
+                         {'label': 'Test Twitter',
+                          'value': 'Test Twitter',
+                          'name': 'testTwitter',
+                          'description': 'Test if Twitter notifications are working. See logs for troubleshooting.',
+                          'input_type': 'button'
+                          },
+                         {'input_type': 'nosave'
+                          }
+                         ]
+
+        return config_option


 class OSX_NOTIFY(object):
@@ -1067,6 +1106,7 @@ class OSX_NOTIFY(object):

             notification_center = NSUserNotificationCenter.defaultUserNotificationCenter()
             notification_center.deliverNotification_(notification)
+            logger.info(u"OSX Notify notifications sent.")

             del pool
             return True
@@ -1204,7 +1244,7 @@ class Email(object):
                           'input_type': 'password'
                           },
                          {'label': 'TLS',
-                          'value': checked(plexpy.CONFIG.EMAIL_TLS),
+                          'value': plexpy.CONFIG.EMAIL_TLS,
                           'name': 'email_tls',
                           'description': 'Does the server use encryption.',
                           'input_type': 'checkbox'
@@ -15,7 +15,7 @@

 import sqlite3

-from plexpy import logger, helpers, monitor, users, plextv
+from plexpy import logger, helpers, activity_pinger, activity_processor, users, plextv
 from xml.dom import minidom

 import plexpy
@@ -245,9 +245,10 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval
     logger.debug(u"PlexPy Importer :: PlexWatch data import in progress...")

     logger.debug(u"PlexPy Importer :: Disabling monitoring while import in progress.")
-    plexpy.schedule_job(monitor.check_active_sessions, 'Check for active sessions', hours=0, minutes=0, seconds=0)
+    plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions',
+                        hours=0, minutes=0, seconds=0)

-    monitor_processing = monitor.MonitorProcessing()
+    ap = activity_processor.ActivityProcessor()
     user_data = users.Users()

     # Get the latest friends list so we can pull user id's
@@ -373,7 +374,7 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval
                 # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
                 # Just make sure that the ratingKey is indeed an integer
                 if session_history_metadata['rating_key'].isdigit():
-                    monitor_processing.write_session_history(session=session_history,
+                    ap.write_session_history(session=session_history,
                                              import_metadata=session_history_metadata,
                                              is_import=True,
                                              import_ignore_interval=import_ignore_interval)
@@ -705,9 +705,9 @@ class PmsConnect(object):
                              'transcode_container': transcode_container,
                              'transcode_protocol': transcode_protocol,
                              'duration': duration,
-                             'progress': progress,
+                             'view_offset': progress,
                              'progress_percent': str(helpers.get_percent(progress, duration)),
-                             'type': 'track',
+                             'media_type': 'track',
                              'indexes': 0
                              }

@@ -826,14 +826,14 @@ class PmsConnect(object):
                              'transcode_container': transcode_container,
                              'transcode_protocol': transcode_protocol,
                              'duration': duration,
-                             'progress': progress,
+                             'view_offset': progress,
                              'progress_percent': str(helpers.get_percent(progress, duration)),
                              'indexes': use_indexes
                              }
             if helpers.get_xml_attr(session, 'ratingKey').isdigit():
-                session_output['type'] = helpers.get_xml_attr(session, 'type')
+                session_output['media_type'] = helpers.get_xml_attr(session, 'type')
             else:
-                session_output['type'] = 'clip'
+                session_output['media_type'] = 'clip'

         elif helpers.get_xml_attr(session, 'type') == 'movie':
             session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
@@ -882,14 +882,14 @@ class PmsConnect(object):
                              'transcode_container': transcode_container,
                              'transcode_protocol': transcode_protocol,
                              'duration': duration,
-                             'progress': progress,
+                             'view_offset': progress,
                              'progress_percent': str(helpers.get_percent(progress, duration)),
                              'indexes': use_indexes
                              }
             if helpers.get_xml_attr(session, 'ratingKey').isdigit():
-                session_output['type'] = helpers.get_xml_attr(session, 'type')
+                session_output['media_type'] = helpers.get_xml_attr(session, 'type')
             else:
-                session_output['type'] = 'clip'
+                session_output['media_type'] = 'clip'

         elif helpers.get_xml_attr(session, 'type') == 'clip':
             session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
@@ -938,9 +938,9 @@ class PmsConnect(object):
                              'transcode_container': transcode_container,
                              'transcode_protocol': transcode_protocol,
                              'duration': duration,
-                             'progress': progress,
+                             'view_offset': progress,
                              'progress_percent': str(helpers.get_percent(progress, duration)),
-                             'type': helpers.get_xml_attr(session, 'type'),
+                             'media_type': helpers.get_xml_attr(session, 'type'),
                              'indexes': 0
                              }

@@ -1027,9 +1027,9 @@ class PmsConnect(object):
                              'transcode_container': transcode_container,
                              'transcode_protocol': transcode_protocol,
                              'duration': '',
-                             'progress': '',
+                             'view_offset': '',
                              'progress_percent': '100',
-                             'type': 'photo',
+                             'media_type': 'photo',
                              'indexes': 0
                              }

@@ -1083,7 +1083,6 @@ class PmsConnect(object):
                                }
             children_list.append(children_output)

-
         output = {'children_count': helpers.get_xml_attr(xml_head[0], 'size'),
                   'children_type': helpers.get_xml_attr(xml_head[0], 'viewGroup'),
                   'title': helpers.get_xml_attr(xml_head[0], 'title2'),
@@ -1270,11 +1269,11 @@ class PmsConnect(object):
         return output

     """
-    Return processed and validated server statistics.
+    Return processed and validated library statistics.

     Output: array
     """
-    def get_library_stats(self):
+    def get_library_stats(self, library_cards=''):
         server_libraries = self.get_server_children()

         server_library_stats = []
@@ -1285,7 +1284,10 @@ class PmsConnect(object):
         for library in libraries_list:
             library_type = library['type']
             section_key = library['key']
+            if section_key in library_cards:
                 library_list = self.get_library_children(library_type, section_key)
+            else:
+                continue

             if library_list['library_count'] != '0':
                 library_stats = {'title': library['title'],
@@ -1,2 +1,2 @@
 PLEXPY_VERSION = "master"
-PLEXPY_RELEASE_VERSION = "1.1.9"
+PLEXPY_RELEASE_VERSION = "1.1.10"
142  plexpy/web_socket.py  Normal file

@@ -0,0 +1,142 @@
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.

# Mostly borrowed from https://github.com/trakt/Plex-Trakt-Scrobbler

from plexpy import logger, activity_pinger

import threading
import plexpy
import json
import time
import websocket

name = 'websocket'
opcode_data = (websocket.ABNF.OPCODE_TEXT, websocket.ABNF.OPCODE_BINARY)


def start_thread():
    # Check for any existing sessions on start up
    activity_pinger.check_active_sessions(ws_request=True)
    # Start the websocket listener on its own thread
    threading.Thread(target=run).start()


def run():
    from websocket import create_connection

    uri = 'ws://%s:%s/:/websockets/notifications' % (
        plexpy.CONFIG.PMS_IP,
        plexpy.CONFIG.PMS_PORT
    )

    # Set authentication token (if one is available)
    if plexpy.CONFIG.PMS_TOKEN:
        uri += '?X-Plex-Token=' + plexpy.CONFIG.PMS_TOKEN

    ws_connected = False
    reconnects = 0

    # Try to open the websocket connection - if it fails after 5 retries fallback to polling
    while not ws_connected and reconnects < 15:
        try:
            logger.info(u'PlexPy WebSocket :: Opening websocket, connection attempt %s.' % str(reconnects + 1))
            ws = create_connection(uri)
            reconnects = 0
            ws_connected = True
            logger.info(u'PlexPy WebSocket :: Ready')
        except IOError, e:
            logger.error(u'PlexPy WebSocket :: %s.' % e)
            reconnects += 1
            time.sleep(5)

    while ws_connected:
        try:
            process(*receive(ws))

            # successfully received data, reset reconnects counter
            reconnects = 0
        except websocket.WebSocketConnectionClosedException:
            if reconnects <= 5:
                reconnects += 1

                # Increasing sleep interval between reconnections
                if reconnects > 1:
                    time.sleep(2 * (reconnects - 1))

                logger.warn(u'PlexPy WebSocket :: Connection has closed, reconnecting...')
                try:
                    ws = create_connection(uri)
                except IOError, e:
                    logger.info(u'PlexPy WebSocket :: %s.' % e)

            else:
                ws_connected = False
                break

    if not ws_connected:
        logger.error(u'PlexPy WebSocket :: Connection unavailable, falling back to polling.')
        plexpy.POLLING_FAILOVER = True
        plexpy.initialize_scheduler()

    logger.debug(u'Leaving thread.')


def receive(ws):
    frame = ws.recv_frame()

    if not frame:
        raise websocket.WebSocketException("Not a valid frame %s" % frame)
    elif frame.opcode in opcode_data:
        return frame.opcode, frame.data
    elif frame.opcode == websocket.ABNF.OPCODE_CLOSE:
        ws.send_close()
        return frame.opcode, None
    elif frame.opcode == websocket.ABNF.OPCODE_PING:
        ws.pong("Hi!")

    return None, None


def process(opcode, data):
    from plexpy import activity_handler

    if opcode not in opcode_data:
        return False

    try:
        info = json.loads(data)
    except Exception as ex:
        logger.warn(u'PlexPy WebSocket :: Error decoding message from websocket: %s' % ex)
        logger.debug(data)
        return False

    type = info.get('type')

    if not type:
        return False

    if type == 'playing':
        # logger.debug('%s.playing %s' % (name, info))
        try:
            time_line = info.get('_children')
        except:
            logger.debug(u"PlexPy WebSocket :: Session found but unable to get timeline data.")
            return False

        activity = activity_handler.ActivityHandler(timeline=time_line[0])
        activity.process()

    return True
@ -66,9 +66,9 @@ class WebInterface(object):
|
||||||
def home(self):
|
def home(self):
|
||||||
config = {
|
config = {
|
||||||
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
|
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
|
||||||
"home_stats_type": plexpy.CONFIG.HOME_STATS_TYPE,
|
"home_stats_cards": plexpy.CONFIG.HOME_STATS_CARDS,
|
||||||
"home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
|
"home_library_cards": plexpy.CONFIG.HOME_LIBRARY_CARDS,
|
||||||
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
|
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER
|
||||||
}
|
}
|
||||||
return serve_template(templatename="index.html", title="Home", config=config)
|
return serve_template(templatename="index.html", title="Home", config=config)
|
||||||
|
|
||||||
|
@ -99,6 +99,7 @@ class WebInterface(object):
|
||||||
# The setup wizard just refreshes the page on submit so we must redirect to home if config set.
|
# The setup wizard just refreshes the page on submit so we must redirect to home if config set.
|
||||||
# Also redirecting to home if a PMS token already exists - will remove this in future.
|
# Also redirecting to home if a PMS token already exists - will remove this in future.
|
||||||
if plexpy.CONFIG.FIRST_RUN_COMPLETE or plexpy.CONFIG.PMS_TOKEN:
|
if plexpy.CONFIG.FIRST_RUN_COMPLETE or plexpy.CONFIG.PMS_TOKEN:
|
||||||
|
plexpy.initialize_scheduler()
|
||||||
raise cherrypy.HTTPRedirect("home")
|
raise cherrypy.HTTPRedirect("home")
|
||||||
else:
|
else:
|
||||||
return serve_template(templatename="welcome.html", title="Welcome", config=config)
|
return serve_template(templatename="welcome.html", title="Welcome", config=config)
|
||||||
|
@ -121,16 +122,41 @@ class WebInterface(object):
|
||||||
return json.dumps(formats)
|
return json.dumps(formats)
|
||||||
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
def home_stats(self, time_range='30', stat_type='0', stat_count='5', **kwargs):
|
def home_stats(self, **kwargs):
|
||||||
data_factory = datafactory.DataFactory()
|
data_factory = datafactory.DataFactory()
|
||||||
stats_data = data_factory.get_home_stats(time_range=time_range, stat_type=stat_type, stat_count=stat_count)
|
|
||||||
|
time_range = plexpy.CONFIG.HOME_STATS_LENGTH
|
||||||
|
stats_type = plexpy.CONFIG.HOME_STATS_TYPE
|
||||||
|
stats_count = plexpy.CONFIG.HOME_STATS_COUNT
|
||||||
|
stats_cards = plexpy.CONFIG.HOME_STATS_CARDS.split(', ')
|
||||||
|
notify_watched_percent = plexpy.CONFIG.NOTIFY_WATCHED_PERCENT
|
||||||
|
|
||||||
|
stats_data = data_factory.get_home_stats(time_range=time_range,
|
||||||
|
stats_type=stats_type,
|
||||||
|
stats_count=stats_count,
|
||||||
|
stats_cards=stats_cards,
|
||||||
|
notify_watched_percent=notify_watched_percent)
|
||||||
|
|
||||||
return serve_template(templatename="home_stats.html", title="Stats", data=stats_data)
|
return serve_template(templatename="home_stats.html", title="Stats", data=stats_data)
|
||||||
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
def library_stats(self, **kwargs):
|
def library_stats(self, **kwargs):
|
||||||
pms_connect = pmsconnect.PmsConnect()
|
pms_connect = pmsconnect.PmsConnect()
|
||||||
stats_data = pms_connect.get_library_stats()
|
|
||||||
|
library_cards = plexpy.CONFIG.HOME_LIBRARY_CARDS.split(', ')
|
||||||
|
|
||||||
|
if library_cards == ['library_statistics_first']:
|
||||||
|
library_cards = ['library_statistics']
|
||||||
|
server_children = pms_connect.get_server_children()
|
||||||
|
server_libraries = server_children['libraries_list']
|
||||||
|
|
||||||
|
for library in server_libraries:
|
||||||
|
library_cards.append(library['key'])
|
||||||
|
|
||||||
|
plexpy.CONFIG.HOME_LIBRARY_CARDS = ', '.join(library_cards)
|
||||||
|
plexpy.CONFIG.write()
|
||||||
|
|
||||||
|
stats_data = pms_connect.get_library_stats(library_cards=library_cards)
|
||||||
|
|
||||||
return serve_template(templatename="library_stats.html", title="Library Stats", data=stats_data)
|
return serve_template(templatename="library_stats.html", title="Library Stats", data=stats_data)
|
||||||
|
|
||||||
|
@ -144,7 +170,12 @@ class WebInterface(object):
|
||||||
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
def graphs(self):
|
def graphs(self):
|
||||||
return serve_template(templatename="graphs.html", title="Graphs")
|
|
||||||
|
config = {
|
||||||
|
"music_logging_enable": plexpy.CONFIG.MUSIC_LOGGING_ENABLE
|
||||||
|
}
|
||||||
|
|
||||||
|
return serve_template(templatename="graphs.html", title="Graphs", config=config)
|
||||||
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
def sync(self):
|
def sync(self):
|
||||||
|
@ -373,46 +404,7 @@ class WebInterface(object):
|
||||||
"cache_dir": plexpy.CONFIG.CACHE_DIR,
|
"cache_dir": plexpy.CONFIG.CACHE_DIR,
|
||||||
"check_github": checked(plexpy.CONFIG.CHECK_GITHUB),
|
"check_github": checked(plexpy.CONFIG.CHECK_GITHUB),
|
||||||
"interface_list": interface_list,
|
"interface_list": interface_list,
|
||||||
"growl_enabled": checked(plexpy.CONFIG.GROWL_ENABLED),
|
|
||||||
"growl_host": plexpy.CONFIG.GROWL_HOST,
|
|
||||||
"growl_password": plexpy.CONFIG.GROWL_PASSWORD,
|
|
||||||
"prowl_enabled": checked(plexpy.CONFIG.PROWL_ENABLED),
|
|
||||||
"prowl_keys": plexpy.CONFIG.PROWL_KEYS,
|
|
||||||
"prowl_priority": plexpy.CONFIG.PROWL_PRIORITY,
|
|
||||||
"xbmc_enabled": checked(plexpy.CONFIG.XBMC_ENABLED),
|
|
||||||
"xbmc_host": plexpy.CONFIG.XBMC_HOST,
|
|
||||||
"xbmc_username": plexpy.CONFIG.XBMC_USERNAME,
|
|
||||||
"xbmc_password": plexpy.CONFIG.XBMC_PASSWORD,
|
|
||||||
"plex_enabled": checked(plexpy.CONFIG.PLEX_ENABLED),
|
|
||||||
"plex_client_host": plexpy.CONFIG.PLEX_CLIENT_HOST,
|
|
||||||
"plex_username": plexpy.CONFIG.PLEX_USERNAME,
|
|
||||||
"plex_password": plexpy.CONFIG.PLEX_PASSWORD,
|
|
||||||
"nma_enabled": checked(plexpy.CONFIG.NMA_ENABLED),
|
|
||||||
"nma_apikey": plexpy.CONFIG.NMA_APIKEY,
|
|
||||||
"nma_priority": int(plexpy.CONFIG.NMA_PRIORITY),
|
|
||||||
"pushalot_enabled": checked(plexpy.CONFIG.PUSHALOT_ENABLED),
|
|
||||||
"pushalot_apikey": plexpy.CONFIG.PUSHALOT_APIKEY,
|
|
||||||
"pushover_enabled": checked(plexpy.CONFIG.PUSHOVER_ENABLED),
|
|
||||||
"pushover_keys": plexpy.CONFIG.PUSHOVER_KEYS,
|
|
||||||
"pushover_apitoken": plexpy.CONFIG.PUSHOVER_APITOKEN,
|
|
||||||
"pushover_priority": plexpy.CONFIG.PUSHOVER_PRIORITY,
|
|
||||||
"pushbullet_enabled": checked(plexpy.CONFIG.PUSHBULLET_ENABLED),
|
|
||||||
"pushbullet_apikey": plexpy.CONFIG.PUSHBULLET_APIKEY,
|
|
||||||
"pushbullet_deviceid": plexpy.CONFIG.PUSHBULLET_DEVICEID,
|
|
||||||
"twitter_enabled": checked(plexpy.CONFIG.TWITTER_ENABLED),
|
|
||||||
"osx_notify_enabled": checked(plexpy.CONFIG.OSX_NOTIFY_ENABLED),
|
|
||||||
"osx_notify_app": plexpy.CONFIG.OSX_NOTIFY_APP,
|
|
||||||
"boxcar_enabled": checked(plexpy.CONFIG.BOXCAR_ENABLED),
|
|
||||||
"boxcar_token": plexpy.CONFIG.BOXCAR_TOKEN,
|
|
||||||
"cache_sizemb": plexpy.CONFIG.CACHE_SIZEMB,
|
"cache_sizemb": plexpy.CONFIG.CACHE_SIZEMB,
|
||||||
"email_enabled": checked(plexpy.CONFIG.EMAIL_ENABLED),
|
|
||||||
"email_from": plexpy.CONFIG.EMAIL_FROM,
|
|
||||||
"email_to": plexpy.CONFIG.EMAIL_TO,
|
|
||||||
"email_smtp_server": plexpy.CONFIG.EMAIL_SMTP_SERVER,
|
|
||||||
"email_smtp_user": plexpy.CONFIG.EMAIL_SMTP_USER,
|
|
||||||
"email_smtp_password": plexpy.CONFIG.EMAIL_SMTP_PASSWORD,
|
|
||||||
"email_smtp_port": int(plexpy.CONFIG.EMAIL_SMTP_PORT),
|
|
||||||
"email_tls": checked(plexpy.CONFIG.EMAIL_TLS),
|
|
||||||
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
|
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
|
||||||
"pms_ip": plexpy.CONFIG.PMS_IP,
|
"pms_ip": plexpy.CONFIG.PMS_IP,
|
||||||
"pms_logs_folder": plexpy.CONFIG.PMS_LOGS_FOLDER,
|
"pms_logs_folder": plexpy.CONFIG.PMS_LOGS_FOLDER,
|
||||||
|
@ -421,7 +413,6 @@ class WebInterface(object):
|
||||||
"pms_ssl": checked(plexpy.CONFIG.PMS_SSL),
|
"pms_ssl": checked(plexpy.CONFIG.PMS_SSL),
|
||||||
"pms_use_bif": checked(plexpy.CONFIG.PMS_USE_BIF),
|
"pms_use_bif": checked(plexpy.CONFIG.PMS_USE_BIF),
|
||||||
"pms_uuid": plexpy.CONFIG.PMS_UUID,
|
"pms_uuid": plexpy.CONFIG.PMS_UUID,
|
||||||
"plexwatch_database": plexpy.CONFIG.PLEXWATCH_DATABASE,
|
|
||||||
"date_format": plexpy.CONFIG.DATE_FORMAT,
|
"date_format": plexpy.CONFIG.DATE_FORMAT,
|
||||||
"time_format": plexpy.CONFIG.TIME_FORMAT,
|
"time_format": plexpy.CONFIG.TIME_FORMAT,
|
||||||
"grouping_global_history": checked(plexpy.CONFIG.GROUPING_GLOBAL_HISTORY),
|
"grouping_global_history": checked(plexpy.CONFIG.GROUPING_GLOBAL_HISTORY),
|
||||||
|
@ -440,6 +431,7 @@ class WebInterface(object):
|
||||||
"movie_notify_on_pause": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_PAUSE),
|
"movie_notify_on_pause": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_PAUSE),
|
||||||
"music_notify_on_pause": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_PAUSE),
|
"music_notify_on_pause": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_PAUSE),
|
||||||
"monitoring_interval": plexpy.CONFIG.MONITORING_INTERVAL,
|
"monitoring_interval": plexpy.CONFIG.MONITORING_INTERVAL,
|
||||||
|
"monitoring_use_websocket": checked(plexpy.CONFIG.MONITORING_USE_WEBSOCKET),
|
||||||
"refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL,
|
"refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL,
|
||||||
"refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
|
"refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
|
||||||
"ip_logging_enable": checked(plexpy.CONFIG.IP_LOGGING_ENABLE),
|
"ip_logging_enable": checked(plexpy.CONFIG.IP_LOGGING_ENABLE),
|
||||||
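Note: every boolean option in the hunk above is wrapped in `checked()` before it reaches the settings template. Below is a minimal sketch of what a `checked()`-style helper typically does; the real helper in the PlexPy source may differ in detail, so treat this as illustrative only.

```python
def checked(variable):
    # Map a truthy config value to the marker string a checkbox template can
    # drop into its "checked" attribute; anything falsy renders as unchecked.
    return 'Checked' if variable else ''

# Mirrors how the config dict above is assembled for the settings page.
config = {
    "monitoring_use_websocket": checked(1),   # -> 'Checked'
    "refresh_users_on_startup": checked(0),   # -> ''
}
```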
@@ -463,6 +455,8 @@ class WebInterface(object):
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
|
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
|
||||||
"home_stats_type": checked(plexpy.CONFIG.HOME_STATS_TYPE),
|
"home_stats_type": checked(plexpy.CONFIG.HOME_STATS_TYPE),
|
||||||
"home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
|
"home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
|
||||||
|
"home_stats_cards": plexpy.CONFIG.HOME_STATS_CARDS,
|
||||||
|
"home_library_cards": plexpy.CONFIG.HOME_LIBRARY_CARDS,
|
||||||
"buffer_threshold": plexpy.CONFIG.BUFFER_THRESHOLD,
|
"buffer_threshold": plexpy.CONFIG.BUFFER_THRESHOLD,
|
||||||
"buffer_wait": plexpy.CONFIG.BUFFER_WAIT,
|
"buffer_wait": plexpy.CONFIG.BUFFER_WAIT,
|
||||||
"group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES)
|
"group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES)
|
||||||
@@ -475,14 +469,9 @@ class WebInterface(object):
         # Handle the variable config options. Note - keys with False values aren't getting passed

         checked_configs = [
-            "launch_browser", "enable_https", "api_enabled", "freeze_db", "growl_enabled",
-            "prowl_enabled", "xbmc_enabled", "check_github",
-            "plex_enabled", "nma_enabled", "pushalot_enabled",
-            "pushover_enabled", "pushbullet_enabled",
-            "twitter_enabled", "osx_notify_enabled",
-            "boxcar_enabled", "email_enabled", "email_tls",
+            "launch_browser", "enable_https", "api_enabled", "freeze_db", "check_github",
             "grouping_global_history", "grouping_user_history", "grouping_charts", "pms_use_bif", "pms_ssl",
-            "tv_notify_enable", "movie_notify_enable", "music_notify_enable",
+            "tv_notify_enable", "movie_notify_enable", "music_notify_enable", "monitoring_use_websocket",
             "tv_notify_on_start", "movie_notify_on_start", "music_notify_on_start",
             "tv_notify_on_stop", "movie_notify_on_stop", "music_notify_on_stop",
             "tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause", "refresh_users_on_startup",
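The comment at the top of this hunk states the key constraint: an unchecked HTML checkbox is omitted from the submitted form entirely, so every name in `checked_configs` has to be defaulted before the config is saved. A small standalone sketch of that pattern follows; the function name and the trimmed-down list are illustrative, not part of the diff.

```python
def apply_checkbox_defaults(kwargs, checked_configs):
    # Checkboxes that were left unchecked never show up in the POST body,
    # so any checkbox-backed key missing from kwargs is treated as off (0).
    for checked_config in checked_configs:
        if checked_config not in kwargs:
            kwargs[checked_config] = 0
    return kwargs

checked_configs = ["launch_browser", "check_github", "monitoring_use_websocket"]
print(apply_checkbox_defaults({"check_github": 1}, checked_configs))
# check_github stays 1; launch_browser and monitoring_use_websocket become 0
```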
@@ -517,6 +506,14 @@ class WebInterface(object):
             if kwargs['pms_ip'] != plexpy.CONFIG.PMS_IP:
                 refresh_users = True

+        if 'home_stats_cards' in kwargs:
+            if kwargs['home_stats_cards'] != 'watch_statistics':
+                kwargs['home_stats_cards'] = ', '.join(kwargs['home_stats_cards'])
+
+        if 'home_library_cards' in kwargs:
+            if kwargs['home_library_cards'] != 'library_statistics':
+                kwargs['home_library_cards'] = ', '.join(kwargs['home_library_cards'])
+
         plexpy.CONFIG.process_kwargs(kwargs)

         # Write the config
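The two new blocks above collapse the multi-select card pickers into the comma-separated strings the config stores; CherryPy typically hands a multi-valued field back as a list and a single value as a plain string, which is presumably why the bare default marker is left untouched. A minimal sketch of the same logic, using hypothetical selections:

```python
def join_cards(selection, default):
    # Mirrors the hunk above: anything other than the bare default marker is
    # joined into one comma-separated string before being written to the config.
    if selection != default:
        selection = ', '.join(selection)
    return selection

print(join_cards(['top_tv', 'top_movies', 'top_users'], 'watch_statistics'))
# -> top_tv, top_movies, top_users
print(join_cards('watch_statistics', 'watch_statistics'))
# -> watch_statistics (left as-is)
```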
@@ -537,15 +534,6 @@ class WebInterface(object):

     @cherrypy.expose
     def set_notification_config(self, **kwargs):
-        # Handle the variable config options. Note - keys with False values aren't getting passed
-
-        checked_configs = [
-            "email_tls"
-        ]
-        for checked_config in checked_configs:
-            if checked_config not in kwargs:
-                # checked items should be zero or one. if they were not sent then the item was not checked
-                kwargs[checked_config] = 0

         for plain_config, use_config in [(x[4:], x) for x in kwargs if x.startswith('use_')]:
             # the use prefix is fairly nice in the html, but does not match the actual config
@@ -1124,6 +1112,18 @@ class WebInterface(object):
         else:
             logger.warn('Unable to retrieve data.')

+    @cherrypy.expose
+    def get_server_children(self, **kwargs):
+
+        pms_connect = pmsconnect.PmsConnect()
+        result = pms_connect.get_server_children()
+
+        if result:
+            cherrypy.response.headers['Content-type'] = 'application/json'
+            return json.dumps(result)
+        else:
+            logger.warn('Unable to retrieve data.')
+
     @cherrypy.expose
     def get_activity(self, **kwargs):

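The new `get_server_children` handler simply serializes whatever `pmsconnect` returns. A rough client-side sketch of calling it follows; the URL, port, and the shape of the returned JSON are assumptions based only on the handler above, so adjust for a real install (HTTP root, authentication, and so on).

```python
import json
import urllib2  # the codebase targets Python 2

def fetch_server_children(base_url='http://localhost:8181'):
    # Hit the new endpoint and decode the JSON payload it returns.
    response = urllib2.urlopen(base_url + '/get_server_children')
    return json.loads(response.read())

# Example (commented out because it needs a running PlexPy instance):
# children = fetch_server_children()
# print(json.dumps(children, indent=2))
```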