Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-31 03:50:11 -07:00)

Commit 26ac539bc4: Merge branch 'dev'
68 changed files with 10497 additions and 499 deletions
.gitignore (vendored): 1 line changed

@@ -15,6 +15,7 @@
version.lock
logs/*
cache/*
*.mmdb

# HTTPS Cert/Key #
##################
API.md: 63 lines changed

@@ -169,6 +169,10 @@ Return the api docs formatted with markdown.
Download the PlexPy log file.


### download_plex_log
Download the Plex log file.


### edit_library
Update a library section on PlexPy.
@@ -318,6 +322,34 @@ Returns:
```


### get_geoip_lookup
Get the geolocation info for an IP address. The GeoLite2 database must be installed.

```
Required parameters:
ip_address

Optional parameters:
None

Returns:
json:
{"continent": "North America",
"country": "United States",
"region": "California",
"city": "Mountain View",
"postal_code": "94035",
"timezone": "America/Los_Angeles",
"latitude": 37.386,
"longitude": -122.0838,
"accuracy": 1000
}
json:
{"error": "The address 127.0.0.1 is not in the database."
}
```


### get_history
Get the PlexPy history.
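As a rough usage sketch (not part of the commit), the new command can be exercised over PlexPy's HTTP API. The host, port, and API key below are placeholders, and the standard `/api/v2` endpoint is assumed:

```python
# Minimal sketch of querying get_geoip_lookup through the PlexPy API.
# Assumptions: PlexPy listening on localhost:8181 and a valid API key
# from the settings page; both values below are placeholders.
import requests

PLEXPY_API = "http://localhost:8181/api/v2"   # assumed default address
API_KEY = "YOUR_API_KEY"                       # placeholder

params = {
    "apikey": API_KEY,
    "cmd": "get_geoip_lookup",
    "ip_address": "8.8.8.8",
}

resp = requests.get(PLEXPY_API, params=params, timeout=10)
resp.raise_for_status()

# A successful lookup returns the geolocation fields shown above;
# an address missing from the database returns an "error" object instead.
print(resp.json())
```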
@@ -555,16 +587,16 @@ Optional parameters:

Returns:
json:
{"child_count": null,
"count": 887,
"do_notify": 1,
"do_notify_created": 1,
"keep_history": 1,
"library_art": "/:/resources/movie-fanart.jpg",
"library_thumb": "/:/resources/movie.png",
"parent_count": null,
"section_id": 1,
"section_name": "Movies",
{"child_count": null,
"count": 887,
"do_notify": 1,
"do_notify_created": 1,
"keep_history": 1,
"library_art": "/:/resources/movie-fanart.jpg",
"library_thumb": "/:/resources/movie.png",
"parent_count": null,
"section_id": 1,
"section_name": "Movies",
"section_type": "movie"
}
```
@@ -1419,7 +1451,7 @@ Returns:
"is_home_user": 1,
"is_restricted": 0,
"keep_history": 1,
"shared_libraries": ["10", "1", "4", "5", "15", "20", "2"],
"shared_libraries": ["10", "1", "4", "5", "15", "20", "2"],
"user_id": 133788,
"user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
"username": "LordCommanderSnow"
@@ -1690,6 +1722,10 @@ Returns:
```


### install_geoip_db
Downloads and installs the GeoLite2 database


### notify
Send a notification using PlexPy.
@@ -1701,6 +1737,7 @@ Required parameters:
10 # Email
16 # Facebook
0 # Growl
19 # Hipchat
12 # IFTTT
18 # Join
4 # NotifyMyAndroid
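For illustration only, a notification could be triggered through the same HTTP API; agent_id 19 maps to the newly added Hipchat agent per the list above, while the endpoint, API key, and the subject/body parameter names are assumptions rather than values taken from this excerpt:

```python
# Hypothetical example: fire a test notification via the PlexPy API.
# The endpoint, API key, and the subject/body parameter names are
# assumptions; agent_id 19 corresponds to Hipchat in the list above.
import requests

params = {
    "apikey": "YOUR_API_KEY",        # placeholder
    "cmd": "notify",
    "agent_id": 19,                  # Hipchat
    "subject": "PlexPy",
    "body": "Test notification sent from the API.",
}
requests.get("http://localhost:8181/api/v2", params=params, timeout=10)
```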
@@ -1812,6 +1849,10 @@ Returns:
```


### uninstall_geoip_db
Uninstalls the GeoLite2 database


### update
Check for PlexPy updates on Github.
CHANGELOG.md: 25 lines changed

@@ -1,5 +1,30 @@
# Changelog

## v1.4.7 (2016-07-14)

* New: Use MaxMind GeoLite2 for IP address lookup.
* Note: The GeoLite2 database must be installed from the settings page.
* New: Check for Plex updates using plex.tv downloads instead of the server API.
* Note: Check for Plex updates has been disabled and must be re-enabled in the settings.
* New: More notification options for Plex updates.
* New: Notifications for concurrent streams by a single user.
* New: Notifications for user streaming from a new device.
* New: HipChat notification agent. (Thanks @aboron)
* Fix: Username showing as blank when friendly name is blank.
* Fix: Direct stream count wrong in the current activity header.
* Fix: Current activity reporting direct stream when reducing the stream quality switches to transcoding.
* Fix: Apostrophe in an Arnold quote causing the shutdown/restart page to crash.
* Fix: Disable refreshing posters in guest mode.
* Fix: PlexWatch/Plexivity import unable to select the "grouped" database table.
* Change: Updated Facebook notification instructions.
* Change: Subject line optional for Join notifications.
* Change: Line break between subject and body text instead of a colon for Facebook, Slack, Twitter, and Telegram.
* Change: Allow Mattermost notifications using the Slack config.
* Change: Better formatting for Slack poster notifications.
* Change: Telegram only notifies once instead of twice when posters are enabled.
* Change: Host Open Sans font locally instead of querying Google Fonts.


## v1.4.6 (2016-06-11)

* New: Added User and Library statistics to the API.
@@ -30,7 +30,7 @@
<div class="col-xs-4">
<select id="table_name" class="form-control" name="table_name">
<option value="processed">processed</option>
<option value="processed">grouped</option>
<option value="grouped">grouped</option>
</select>
</div>
</div>
@@ -15,7 +15,7 @@
<link href="${http_root}css/bootstrap3/bootstrap.css" rel="stylesheet">
<link href="${http_root}css/pnotify.custom.min.css" rel="stylesheet" />
<link href="${http_root}css/plexpy.css" rel="stylesheet">
<link href="https://fonts.googleapis.com/css?family=Open+Sans:400,600" rel="stylesheet" type="text/css">
<link href="${http_root}css/opensans.min.css" rel="stylesheet">
<link href="${http_root}css/font-awesome.min.css" rel="stylesheet">
${next.headIncludes()}
@@ -170,7 +170,7 @@
<form action="search" method="post" class="form" id="search_form">
<div class="input-group">
<span class="input-textbox">
<input type="text" class="form-control" name="query" id="query" aria-label="Search" placeholder="Search..."/>
<input type="text" class="form-control" name="query" id="query" aria-label="Search" placeholder="Search Plex library..."/>
</span>
<span class="input-group-btn">
<button class="btn btn-dark btn-inactive" type="submit" id="search_button"><i class="fa fa-search"></i></button>
data/interfaces/default/configuration_table.html (new file): 129 lines added

@@ -0,0 +1,129 @@
<%doc>
USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE

For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/

Filename: configuration_table.html
Version: 0.1

DOCUMENTATION :: END
</%doc>

<%!
import os
import sys
import plexpy
from plexpy import common, logger
from plexpy.helpers import anon_url
%>

<table class="config-info-table small-muted">
<tbody>
% if plexpy.CURRENT_VERSION:
<tr>
<td>Git Branch:</td>
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/tree/%s' % plexpy.CONFIG.GIT_BRANCH)}">${plexpy.CONFIG.GIT_BRANCH}</a></td>
</tr>
<tr>
<td>Git Commit Hash:</td>
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/commit/%s' % plexpy.CURRENT_VERSION)}">${plexpy.CURRENT_VERSION}</a></td>
</tr>
% endif
<tr>
<td>Configuration File:</td>
<td>${plexpy.CONFIG_FILE}</td>
</tr>
<tr>
<td>Database File:</td>
<td>${plexpy.DB_FILE}</td>
</tr>
<tr>
<td>Log File:</td>
<td><a class="no-highlight" href="logFile" target="_blank">${os.path.join(plexpy.CONFIG.LOG_DIR, logger.FILENAME)}</a></td>
</tr>
<tr>
<td>Backup Directory:</td>
<td>${plexpy.CONFIG.BACKUP_DIR}</td>
</tr>
<tr>
<td>Cache Directory:</td>
<td>${plexpy.CONFIG.CACHE_DIR}</td>
</tr>
<tr>
<td>GeoLite2 Database:</td>
% if plexpy.CONFIG.GEOIP_DB:
<td>${plexpy.CONFIG.GEOIP_DB} | <a class="no-highlight" href="#" id="reinstall_geoip_db">Reinstall / Update</a> | <a class="no-highlight" href="#" id="uninstall_geoip_db">Uninstall</a></td>
% else:
<td><a class="no-highlight" href="#" id="install_geoip_db">Click here to install the GeoLite2 database.</a></td>
% endif
</tr>
% if plexpy.ARGS:
<tr>
<td>Arguments:</td>
<td>${plexpy.ARGS}</td>
</tr>
% endif
<tr>
<td>Platform:</td>
<td>${common.PLATFORM} ${common.PLATFORM_VERSION}</td>
</tr>
<tr>
<td>Python Version:</td>
<td>${sys.version}</td>
</tr>
<tr>
<td class="top-line">Plex Forums:</td>
<td class="top-line"><a class="no-highlight" href="${anon_url('https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program')}" target="_blank">https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program</a></td>
</tr>
<tr>
<td>Source:</td>
<td><a id="source-link" class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy')}" target="_blank">https://github.com/drzoidberg33/plexpy</a></td>
</tr>
<tr>
<td>Wiki:</td>
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/wiki')}" target="_blank">https://github.com/drzoidberg33/plexpy/wiki</a></td>
</tr>
<tr>
<td>Issues:</td>
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('https://github.com/drzoidberg33/plexpy/issues')}" data-id="issue">https://github.com/drzoidberg33/plexpy/issues</a></td>
</tr>
<tr>
<td>Feature Requests:</td>
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('http://feathub.com/drzoidberg33/plexpy')}" data-id="feature request">http://feathub.com/drzoidberg33/plexpy</a></td>
</tr>
<tr>
<td>Gitter Chat:</td>
<td><a class="no-highlight" href="${anon_url('https://gitter.im/drzoidberg33/plexpy')}" target="_blank">https://gitter.im/drzoidberg33/plexpy</a></td>
</tr>
</tbody>
</table>

<script>
$(document).ready(function () {
$("#install_geoip_db, #reinstall_geoip_db").click(function () {
var msg = 'Are you sure you want to install the GeoLite2 database?<br /><br />' +
'The database is used to lookup IP address geolocation info.<br />' +
'The database will be downloaded from <a href="${anon_url("https://dev.maxmind.com/geoip/geoip2/geolite2/")}" target="_blank">MaxMind</a>, <br />' +
'and requires <strong>100MB</strong> of free space to install in your PlexPy directory.<br />'
var url = 'install_geoip_db';
confirmAjaxCall(url, msg, 'Installing GeoLite2 database.', getConfigurationTable);
});

$("#uninstall_geoip_db").click(function () {
var msg = 'Are you sure you want to uninstall the GeoLite2 database?<br /><br />' +
'You will not be able to lookup IP address geolocation info.';
var url = 'uninstall_geoip_db';
confirmAjaxCall(url, msg, 'Uninstalling GeoLite2 database.', getConfigurationTable);
});

$('.guidelines-modal-link').on('click', function (e) {
e.preventDefault();
$('#guidelines-link').attr('href', $('#source-link').attr('href'));
$('#guidelines-type').text($(this).data('id'))
$('#guidelines-modal').modal();
$('#guidelines-continue').attr('href', $(this).attr('href')).on('click', function () {
$('#guidelines-modal').modal('hide');
});
});
});
</script>
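The GeoLite2 row above is only the install/uninstall UI; once a .mmdb file is in place, a lookup against it can be sketched with MaxMind's geoip2 package. This is an illustrative standalone snippet under assumed paths, not PlexPy's actual implementation:

```python
# Illustrative sketch only: read a locally installed GeoLite2 City database
# with the MaxMind geoip2 package (pip install geoip2). The path below is a
# placeholder; PlexPy stores its own copy after install_geoip_db runs.
import geoip2.database
import geoip2.errors

reader = geoip2.database.Reader("GeoLite2-City.mmdb")
try:
    record = reader.city("8.8.8.8")
    print(record.country.name, record.city.name,
          record.location.latitude, record.location.longitude)
except geoip2.errors.AddressNotFoundError as e:
    # Mirrors the API's "address is not in the database" error case.
    print(e)
finally:
    reader.close()
```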
data/interfaces/default/css/opensans.min.css (vendored, new file): 1 line added

@@ -0,0 +1 @@
@font-face{font-family:'Open Sans';font-weight:400;font-style:normal;src:url(../fonts/Open-Sans-regular/Open-Sans-regular.eot);src:url(../fonts/Open-Sans-regular/Open-Sans-regular.eot?#iefix) format('embedded-opentype'),local('Open Sans'),local('Open-Sans-regular'),url(../fonts/Open-Sans-regular/Open-Sans-regular.woff2) format('woff2'),url(../fonts/Open-Sans-regular/Open-Sans-regular.woff) format('woff'),url(../fonts/Open-Sans-regular/Open-Sans-regular.ttf) format('truetype'),url(../fonts/Open-Sans-regular/Open-Sans-regular.svg#OpenSans) format('svg')}@font-face{font-family:'Open Sans';font-weight:600;font-style:normal;src:url(../fonts/Open-Sans-600/Open-Sans-600.eot);src:url(../fonts/Open-Sans-600/Open-Sans-600.eot?#iefix) format('embedded-opentype'),local('Open Sans Semibold'),local('Open-Sans-600'),url(../fonts/Open-Sans-600/Open-Sans-600.woff2) format('woff2'),url(../fonts/Open-Sans-600/Open-Sans-600.woff) format('woff'),url(../fonts/Open-Sans-600/Open-Sans-600.ttf) format('truetype'),url(../fonts/Open-Sans-600/Open-Sans-600.svg#OpenSans) format('svg')}
@@ -3004,4 +3004,9 @@ a:hover .overlay-refresh-image {
}
a:hover .overlay-refresh-image:hover {
    opacity: .9;
}
#ip_error {
    color: #aaa;
    display: none;
    text-align: center;
}
@@ -106,7 +106,9 @@ DOCUMENTATION :: END
% else:
<div class="dashboard-activity-poster-face" style="background-image: url(${a['art']});"></div>
% endif
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
<div class="dashboard-activity-button-info">
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${a['session_key']}">
<i class="fa fa-info-circle"></i>
@@ -108,7 +108,9 @@ DOCUMENTATION :: END
% else:
<div class="dashboard-activity-poster-face" style="background-image: url(${data['art']});"></div>
% endif
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image left" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
<div class="dashboard-activity-button-info">
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${data['session_key']}" data-id="${data['session_key']}">
<i class="fa fa-info-circle"></i>
@@ -133,79 +135,39 @@ DOCUMENTATION :: END
% endif
</span>
</div>
% if data['media_type'] == 'track':
% if data['audio_decision'] == 'direct play':
Stream <strong>Direct Play</strong>
% elif data['audio_decision'] == 'copy':
Stream <strong>Direct Stream</strong>
% else:
Stream <strong>
Transcoding
<span id="transcode-state-${data['session_key']}">
(Speed: ${data['transcode_speed']})
<span id="transcode-state-${data['session_key']}">
% if data['video_decision'] == 'transcode' or data['audio_decision'] == 'transcode':
Stream <strong>Transcode (Speed: ${data['transcode_speed']})
% if data['throttled'] == '1':
(Throttled)
% endif
</span>
</strong>
% endif
<br />
% if data['audio_decision'] == 'direct play':
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'copy':
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'transcode':
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% endif
% elif data['media_type'] == 'episode' or data['media_type'] == 'movie' or data['media_type'] == 'clip':
% if data['video_decision'] == 'direct play' and data['audio_decision'] == 'direct play':
Stream <strong>Direct Play</strong>
% elif data['video_decision'] == 'copy' and data['audio_decision'] == 'copy':
Stream <strong>Direct Stream</strong>
% else:
Stream <strong>
Transcoding
<span id="transcode-state-${data['session_key']}">
(Speed: ${data['transcode_speed']})
% if data['throttled'] == '1':
(Throttled)
</strong>
% elif data['video_decision'] == 'copy' or data['audio_decision'] == 'copy':
Stream <strong>Direct Stream</strong>
% else:
Stream <strong>Direct Play</strong>
% endif
<br />
% if data['video_decision'] and data['media_type'] != 'photo':
% if data['video_decision'] == 'transcode':
Video <strong>Transcode (${data['transcode_video_codec']}) (${data['transcode_width']}x${data['transcode_height']})</strong>
% elif data['video_decision'] == 'copy':
Video <strong>Direct Stream (${data['transcode_video_codec']}) (${data['width']}x${data['height']})</strong>
% else:
Video <strong>Direct Play (${data['video_codec']}) (${data['width']}x${data['height']})</strong>
% endif
</span>
</strong>
% endif
<br />
% if data['video_decision'] == 'direct play':
Video <strong>Direct Play (${data['video_codec']}) (${data['width']}x${data['height']})</strong>
% elif data['video_decision'] == 'copy':
Video <strong>Direct Stream (${data['transcode_video_codec']}) (${data['width']}x${data['height']})</strong>
% elif data['video_decision'] == 'transcode':
Video <strong>Transcode (${data['transcode_video_codec']}) (${data['transcode_width']}x${data['transcode_height']})</strong>
% endif
<br />
% if data['audio_decision'] == 'direct play':
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'copy':
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'transcode':
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% endif
% elif data['media_type'] == 'photo':
% if data['video_decision'] == 'direct play':
Stream <strong>Direct Play</strong>
% elif data['video_decision'] == 'copy':
Stream <strong>Direct Stream</strong>
% else:
Stream <strong>
<span id="transcode-state-${data['session_key']}">
(Speed: ${data['transcode_speed']})
% if data['throttled'] == '1':
(Throttled)
<br />
% endif
% if data['audio_decision']:
% if data['audio_decision'] == 'transcode':
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'copy':
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% else:
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
% endif
</span>
</strong>
% endif
% endif
<br>
% endif
</span>
</div>
</div>
% if data['media_type'] != 'photo':
data/interfaces/default/fonts/Open-Sans-600/LICENSE.txt (new file): 202 lines added
@@ -0,0 +1,202 @@
(Full text of the Apache License, Version 2.0, January 2004, http://www.apache.org/licenses/; standard license boilerplate not repeated here.)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.eot (new binary file, not shown)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.svg (new file, 1637 lines; diff suppressed, image, 104 KiB)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.ttf (new binary file, not shown)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff (new binary file, not shown)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff2 (new binary file, not shown)

data/interfaces/default/fonts/Open-Sans-regular/LICENSE.txt (new file): 202 lines added
@@ -0,0 +1,202 @@
(Identical Apache License, Version 2.0 text as the Open-Sans-600 LICENSE.txt above; boilerplate not repeated here.)
Open-Sans-regular font files (new): binary files not shown; large SVG diff suppressed (image, 105 KiB).
@@ -103,7 +103,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['grandparent_thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">

@@ -150,7 +152,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['grandparent_thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">

@@ -201,7 +205,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['grandparent_thumb'] != '':
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">

@@ -244,7 +250,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['grandparent_thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">

@@ -299,7 +307,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">

@@ -346,7 +356,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">

@@ -397,7 +409,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">

@@ -440,7 +454,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">

@@ -495,7 +511,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['grandparent_thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">

@@ -542,7 +560,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['grandparent_thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">

@@ -593,7 +613,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['grandparent_thumb'] != '':
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">

@@ -636,7 +658,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['grandparent_thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">

@@ -859,7 +883,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">

@@ -916,7 +942,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">
@@ -169,10 +169,34 @@
}

// if transcoding, update the transcode state
var ts = '';
if (s.video_decision == 'transcode' || s.audio_decision == 'transcode') {
    var throttled = (s.throttled == '1') ? ' (Throttled)' : '';
    $('#transcode-state-' + key).html('(Speed: ' + s.transcode_speed + ')' + throttled);
    ts += 'Stream <strong>Transcode (Speed: ' + s.transcode_speed + ')' + throttled + '</strong><br>';
} else if (s.video_decision == 'copy' || s.audio_decision == 'copy') {
    ts += 'Stream <strong>Direct Stream</strong><br>';
} else {
    ts += 'Stream <strong>Direct Play</strong><br>';
}
if (s.video_decision != '' && s.media_type != 'photo') {
    if (s.video_decision == 'transcode') {
        ts += 'Video <strong>Transcode (' + s.transcode_video_codec + ') (' + s.transcode_width + 'x' + s.transcode_height + ')</strong><br>';
    } else if (s.video_decision == 'copy') {
        ts += 'Video <strong>Direct Stream (' + s.transcode_video_codec + ') (' + s.width + 'x' + s.height + ')</strong><br>';
    } else {
        ts += 'Video <strong>Direct Play (' + s.video_codec + ') (' + s.width + 'x' + s.height + ')</strong><br>';
    }
}
if (s.audio_decision != '') {
    if (s.audio_decision == 'transcode') {
        ts += 'Audio <strong>Transcode (' + s.transcode_audio_codec + ') (' + s.transcode_audio_channels + 'ch)</strong>';
    } else if (s.audio_decision == 'copy') {
        ts += 'Audio <strong>Direct Stream (' + s.transcode_audio_codec + ') (' + s.transcode_audio_channels + 'ch)</strong>';
    } else {
        ts += 'Audio <strong>Direct Play (' + s.audio_codec + ') (' + s.audio_channels + 'ch)</strong>';
    }
}
$('#transcode-state-' + key).html(ts);

// update the stream progress times
$('#stream-eta-' + key).html(moment().add(parseInt(s.duration) - parseInt(s.view_offset), 'milliseconds').format(time_format));
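The decision rules used above (and in the Mako template earlier in this commit) reduce to a small mapping: transcode if either track transcodes, direct stream if either track is copied, direct play otherwise. The sketch below restates that mapping in Python purely for clarity; it is not part of the commit:

```python
# Clarifying sketch (not in the commit): how a session's overall stream,
# video, and audio labels follow from the Plex decision fields.
def stream_labels(video_decision, audio_decision):
    if video_decision == 'transcode' or audio_decision == 'transcode':
        stream = 'Transcode'        # either track transcoding => transcode
    elif video_decision == 'copy' or audio_decision == 'copy':
        stream = 'Direct Stream'    # copied/remuxed stream
    else:
        stream = 'Direct Play'

    label = {'transcode': 'Transcode', 'copy': 'Direct Stream',
             'direct play': 'Direct Play'}
    video = label.get(video_decision, '')
    audio = label.get(audio_decision, '')
    return stream, video, audio


print(stream_labels('copy', 'transcode'))  # ('Transcode', 'Direct Stream', 'Transcode')
```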
@@ -68,7 +68,9 @@ DOCUMENTATION :: END
<div class="container-fluid">
<div class="row">
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['art']}&width=1920&height=1080)"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image info-art" title="Refresh background image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
<div class="summary-container">
<div class="summary-navbar">
<div class="col-md-12">
@@ -120,22 +122,28 @@ DOCUMENTATION :: END
<span></span>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
% elif data['media_type'] == 'artist' or data['media_type'] == 'album' or data['media_type'] == 'track':
<div class="summary-poster-face-track" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=500&height=500&fallback=cover);">
<div class="summary-poster-face-overlay">
<span></span>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
% else:
<div class="summary-poster-face" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=300&height=450&fallback=poster);">
<div class="summary-poster-face-overlay">
<span></span>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
% endif
</a>
</div>
<div class="summary-content-title">
@ -51,7 +51,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
% elif data['children_type'] == 'episode':
|
||||
|
@ -64,7 +66,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="item-children-instance-text-wrapper episode-item">
|
||||
|
@ -76,7 +80,9 @@ DOCUMENTATION :: END
|
|||
<a href="info?rating_key=${child['rating_key']}" title="${child['title']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
|
|
|
@ -65,7 +65,9 @@ DOCUMENTATION :: END
|
|||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
|
@ -88,7 +90,9 @@ DOCUMENTATION :: END
|
|||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
|
@ -111,7 +115,9 @@ DOCUMENTATION :: END
|
|||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
|
||||
|
@ -134,7 +140,9 @@ DOCUMENTATION :: END
|
|||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face episode-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=500&height=250&fallback=art);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper episode-item">
|
||||
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
|
||||
|
@ -158,7 +166,9 @@ DOCUMENTATION :: END
|
|||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
|
@ -180,7 +190,9 @@ DOCUMENTATION :: END
|
|||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
|
||||
|
@ -210,7 +222,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
|
|
|
@ -3,37 +3,41 @@
|
|||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title" id="myModalLabel">
|
||||
% if data:
|
||||
<strong><span id="modal_header_ip_address">
|
||||
<i class="fa fa-spin fa-refresh"></i> Loading Details...
|
||||
% if data:
|
||||
<i class="fa fa-map-marker"></i> IP Address: ${data}
|
||||
% else:
|
||||
<i class="fa fa-exclamation-circle"></i> Invalid IP Address
|
||||
% endif
|
||||
</span></strong>
|
||||
% else:
|
||||
<i class="fa fa-exclamation-circle"></i> Invalid IP Address</span></strong>
|
||||
% endif
|
||||
</h4>
|
||||
</div>
|
||||
<div class="modal-body" id="modal-text">
|
||||
<div class="col-sm-6">
|
||||
<div id="ip_error" class="text-muted"></div>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Location Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-6">
|
||||
<ul class="list-unstyled">
|
||||
<li>Continent: <strong><span id="continent"></span></strong></li>
|
||||
<li>Country: <strong><span id="country"></span></strong></li>
|
||||
<li>Region: <strong><span id="region"></span></strong></li>
|
||||
<li>City: <strong><span id="city"></span></strong></li>
|
||||
<li>Timezone: <strong><span id="timezone"></span></strong></li>
|
||||
<li>Latitude: <strong><span id="lat"></span></strong></li>
|
||||
<li>Longitude: <strong><span id="lon"></span></strong></li>
|
||||
<li>Postal Code: <strong><span id="postal_code"></span></strong></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-sm-6">
|
||||
<h4><strong>Connection Details</strong></h4>
|
||||
<ul class="list-unstyled">
|
||||
<li>Organization: <strong><span id="organization"></span></strong></li>
|
||||
<li>Timezone: <strong><span id="timezone"></span></strong></li>
|
||||
<li>Latitude: <strong><span id="latitude"></span></strong></li>
|
||||
<li>Longitude: <strong><span id="longitude"></span></strong></li>
|
||||
<li>Accuracy Radius: <strong><span id="accuracy"></span></strong></li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<% from plexpy.helpers import anon_url %>
|
||||
<span class="text-muted">Telize service written by <a href="${anon_url('https://github.com/fcambus/telize')}" target="_blank">Frederic Cambus</a>.</span>
|
||||
<span class="text-muted">GeoLite2 data created by <a href="${anon_url('http://www.maxmind.com')}" target="_blank">MaxMind</a>.</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -42,25 +46,29 @@
|
|||
<script>
|
||||
function getUserLocation(ip_address) {
|
||||
$.ajax({
|
||||
url: 'https://telize.myhtpc.co.za/geoip/' + ip_address,
|
||||
cache: true,
|
||||
async: true,
|
||||
url: 'get_geoip_lookup',
|
||||
type: 'GET',
|
||||
dataType: 'json',
|
||||
error: function(){
|
||||
$('#modal_header_ip_address').html("Request failed. Server may be too busy.");
|
||||
data: { ip_address: ip_address },
|
||||
cache: true,
|
||||
async: true,
|
||||
error: function () {
|
||||
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> Request failed.<br /><br />').show();
|
||||
},
|
||||
success: function(data) {
|
||||
$('#modal_header_ip_address').html('<i class="fa fa-map-marker"></i> IP Address: ' + ip_address);
|
||||
$('#country').html(data.country);
|
||||
$('#city').html(data.city);
|
||||
$('#region').html(data.region);
|
||||
$('#timezone').html(data.timezone);
|
||||
$('#lat').html(data.latitude);
|
||||
$('#lon').html(data.longitude);
|
||||
$('#organization').html(data.organization);
|
||||
},
|
||||
timeout: 5000
|
||||
success: function (data) {
|
||||
if ('error' in data) {
|
||||
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> ' + data.error + '<br /><br />').show();
|
||||
} else {
|
||||
$('#continent').html(data.continent);
|
||||
$('#country').html(data.country);
|
||||
$('#region').html(data.region);
|
||||
$('#city').html(data.city);
|
||||
$('#postal_code').html(data.postal_code);
|
||||
$('#timezone').html(data.timezone);
|
||||
$('#latitude').html(data.latitude);
|
||||
$('#longitude').html(data.longitude);
|
||||
$('#accuracy').html(data.accuracy + ' km');
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
getUserLocation('${data}');
|
||||
|
|
|
@ -54,6 +54,32 @@ function showMsg(msg, loader, timeout, ms, error) {
}
}

// Show a confirmation modal, then POST to the given URL and display the returned result message.
function confirmAjaxCall(url, msg, loader_msg, callback) {
    $("#confirm-message").html(msg);
    $('#confirm-modal').modal();
    $('#confirm-modal').one('click', '#confirm-button', function () {
        if (loader_msg) {
            showMsg(loader_msg, true, false)
        }
        $.ajax({
            url: url,
            type: 'POST',
            complete: function (xhr, status) {
                var result = $.parseJSON(xhr.responseText);
                msg = result.message;
                if (result.result == 'success') {
                    showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
                } else {
                    showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
                }
                if (typeof callback === "function") {
                    callback();
                }
            }
        });
    });
}

function doAjaxCall(url, elem, reload, form, callback) {
|
||||
// Set Message
|
||||
feedback = $("#ajaxMsg");
|
||||
|
|
|
@ -39,8 +39,10 @@ DOCUMENTATION :: END
|
|||
<div class="row">
|
||||
% if data['library_art']:
|
||||
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['library_art']}&width=1920&height=1080)"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image info-art" title="Refresh background image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% endif
|
||||
<div class="summary-container">
|
||||
<div class="summary-navbar">
|
||||
<div class="col-md-12">
|
||||
|
|
|
@ -60,7 +60,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
|
|
@ -57,6 +57,20 @@
|
|||
</label>
|
||||
<p class="help-block">Trigger notification when a media item triggers the defined buffer threshold.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_concurrent" ${helpers.checked(data['on_concurrent'])} class="toggle-switches">
|
||||
Notify on user concurrent streams
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a user has concurrent streams.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_newdevice" ${helpers.checked(data['on_newdevice'])} class="toggle-switches">
|
||||
Notify on user new device
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a user streams from a new device.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_created" ${helpers.checked(data['on_created'])} class="toggle-switches">
|
||||
|
|
|
@ -49,7 +49,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@ -70,7 +72,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@ -93,7 +97,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
import sys
|
||||
|
||||
import plexpy
|
||||
from plexpy import common, logger, notifiers, versioncheck
|
||||
from plexpy import common, notifiers, versioncheck
|
||||
from plexpy.helpers import anon_url
|
||||
|
||||
available_notification_agents = sorted(notifiers.available_notification_agents(), key=lambda k: k['name'])
|
||||
|
@ -62,78 +62,10 @@
|
|||
<div class="padded-header">
|
||||
<h3>PlexPy Configuration</h3>
|
||||
</div>
|
||||
<table class="config-info-table small-muted">
|
||||
<tbody>
|
||||
% if plexpy.CURRENT_VERSION:
|
||||
<tr>
|
||||
<td>Git Branch:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/tree/%s' % plexpy.CONFIG.GIT_BRANCH)}">${plexpy.CONFIG.GIT_BRANCH}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Git Commit Hash:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/commit/%s' % plexpy.CURRENT_VERSION)}">${plexpy.CURRENT_VERSION}</a></td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Configuration File:</td>
|
||||
<td>${plexpy.CONFIG_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Database File:</td>
|
||||
<td>${plexpy.DB_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Log File:</td>
|
||||
<td><a class="no-highlight" href="logFile" target="_blank">${os.path.join(config['log_dir'], logger.FILENAME)}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Backup Directory:</td>
|
||||
<td>${config['backup_dir']}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Cache Directory:</td>
|
||||
<td>${config['cache_dir']}</td>
|
||||
</tr>
|
||||
% if plexpy.ARGS:
|
||||
<tr>
|
||||
<td>Arguments:</td>
|
||||
<td>${plexpy.ARGS}</td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Platform:</td>
|
||||
<td>${common.PLATFORM} ${common.PLATFORM_VERSION}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Python Version:</td>
|
||||
<td>${sys.version}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="top-line">Plex Forums:</td>
|
||||
<td class="top-line"><a class="no-highlight" href="${anon_url('https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program')}" target="_blank">https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Source:</td>
|
||||
<td><a id="source-link" class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy')}" target="_blank">https://github.com/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Wiki:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/wiki')}" target="_blank">https://github.com/drzoidberg33/plexpy/wiki</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Issues:</td>
|
||||
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('https://github.com/drzoidberg33/plexpy/issues')}" data-id="issue">https://github.com/drzoidberg33/plexpy/issues</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Feature Requests:</td>
|
||||
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('http://feathub.com/drzoidberg33/plexpy')}" data-id="feature request">http://feathub.com/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Gitter Chat:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://gitter.im/drzoidberg33/plexpy')}" target="_blank">https://gitter.im/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<div id="plexpy-configuration-table">
|
||||
<div class='text-muted'><i class="fa fa-refresh fa-spin"></i> Loading configuration table...</div>
|
||||
<br>
|
||||
</div>
|
||||
<div class="padded-header">
|
||||
<h3>PlexPy Scheduled Tasks</h3>
|
||||
</div>
|
||||
|
@ -370,7 +302,7 @@
|
|||
</div>
|
||||
<div id="home_stats_count_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">Specify the number of items to show in the top lists for the watch statistics on the home page. Max is 10 items, default is 5 items, 0 to disable.</p>
|
||||
<p class="help-block">Specify the number of items to show in the top lists for the watch statistics on the home page. Maximum 10 items, default 5 items, 0 to disable.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
|
@ -581,15 +513,31 @@
|
|||
<div role="tabpanel" class="tab-pane" id="tabs-5">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Plex Media Server <small style="color: #fff;">Version <span id="pms_version">unknown</span></small></h3>
|
||||
<h3>Plex Media Server <small style="color: #fff;">Version <span id="pms_version">${config['pms_version']}</span></small></h3>
|
||||
</div>
|
||||
<p class="help-block">If you're using websocket monitoring, any server changes require a restart of PlexPy.</p>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="monitor_pms_updates" name="monitor_pms_updates" value="1" ${config['monitor_pms_updates']}> Monitor Plex Updates
|
||||
</label>
|
||||
<p class="help-block">Enable to have PlexPy check if updates are available for the Plex Media Server.<br />
|
||||
Note: The Plex updater is broken on certain Plex Pass version of Plex Media Server. PlexPy will automatically disable checking for Plex updates if one of these versions is found.</p>
|
||||
<p class="help-block">Enable to have PlexPy check if updates are available for the Plex Media Server.</p>
|
||||
</div>
|
||||
<div id="pms_update_options">
|
||||
<div class="form-group">
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<label for="pms_update_channel">Update Channel</label>
|
||||
<select class="form-control" id="pms_update_channel" name="pms_update_channel">
|
||||
<option value="public">Public</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col-md-5">
|
||||
<label for="pms_update_distro_build">Release</label>
|
||||
<select class="form-control" id="pms_update_distro_build" name="pms_update_distro_build">
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
|
@ -792,7 +740,7 @@
|
|||
</div>
|
||||
<div id="monitoring_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The interval (in seconds) PlexPy will ping your Plex Server. Min 30 seconds, recommended 60 seconds.</p>
|
||||
<p class="help-block">The interval (in seconds) PlexPy will ping your Plex Server. Minimum 30 seconds, recommended 60 seconds.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
|
@ -934,6 +882,22 @@
|
|||
</label>
|
||||
<p class="help-block">Disable to prevent consecutive notifications (i.e. both watched & stopped notifications).</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_concurrent_by_ip" id="notify_concurrent_by_ip" value="1" ${config['notify_concurrent_by_ip']}> User Concurrent Streams Notifications by IP Address
|
||||
</label>
|
||||
<p class="help-block">Enable to only get notified of concurrent streams by a single user from different IP addresses.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_concurrent_threshold">User Concurrent Stream Threshold</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="notify_concurrent_threshold" name="notify_concurrent_threshold" value="${config['notify_concurrent_threshold']}" data-parsley-min="2" data-parsley-trigger="change" data-parsley-errors-container="#notify_concurrent_threshold_error" required>
|
||||
</div>
|
||||
<div id="notify_concurrent_threshold_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The number of concurrent streams by a single user for PlexPy to trigger a notification. Minimum 2.</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Recently Added Notifications</h3>
|
||||
|
@ -1078,6 +1042,40 @@
|
|||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<div class="link"><i class="fa fa-arrow-circle-o-right fa-fw"></i> User Concurrent Streams<i class="fa fa-chevron-down"></i></div>
|
||||
<ul class="submenu">
|
||||
<li>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_concurrent_subject_text">Subject Line</label>
|
||||
<input class="form-control" type="text" id="notify_on_concurrent_subject_text" name="notify_on_concurrent_subject_text" value="${config['notify_on_concurrent_subject_text']}" data-parsley-trigger="change" required>
|
||||
<p class="help-block">Set a custom subject line.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_buffer_body_text">Message Body</label>
|
||||
<textarea class="form-control" id="notify_on_concurrent_body_text" name="notify_on_concurrent_body_text" data-parsley-trigger="change" data-autoresize required>${config['notify_on_concurrent_body_text']}</textarea>
|
||||
<p class="help-block">Set a custom body.</p>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<div class="link"><i class="fa fa-desktop fa-fw"></i> User New Device<i class="fa fa-chevron-down"></i></div>
|
||||
<ul class="submenu">
|
||||
<li>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_newdevice_subject_text">Subject Line</label>
|
||||
<input class="form-control" type="text" id="notify_on_newdevice_subject_text" name="notify_on_newdevice_subject_text" value="${config['notify_on_newdevice_subject_text']}" data-parsley-trigger="change" required>
|
||||
<p class="help-block">Set a custom subject line.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_buffer_body_text">Message Body</label>
|
||||
<textarea class="form-control" id="notify_on_newdevice_body_text" name="notify_on_newdevice_body_text" data-parsley-trigger="change" data-autoresize required>${config['notify_on_newdevice_body_text']}</textarea>
|
||||
<p class="help-block">Set a custom body.</p>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
<ul id="accordion-timeline" class="accordion list-unstyled">
|
||||
<li>
|
||||
|
@ -1217,11 +1215,7 @@
|
|||
% else:
|
||||
<a href="javascript:void(0)" data-target="#notification-triggers-modal" data-id="${agent['id']}" class="toggle-notification-triggers-modal toggle-left" data-toggle="modal"><i class="fa fa-lg fa-bell"></i></a>
|
||||
% endif
|
||||
% if agent['id'] == 17:
|
||||
${agent['name']} <span style="color: #eb8600; padding-left: 10px;">[experimental]</span>
|
||||
% else:
|
||||
${agent['name']}
|
||||
% endif
|
||||
% if agent['has_config']:
|
||||
<a href="javascript:void(0)" rel="tooltip" data-target="#notification-config-modal" data-placement="top" title data-title="Open configuration" data-id="${agent['id']}" class="toggle-notification-config-modal toggle-right" data-toggle="modal"><i class="fa fa-lg fa-cog"></i></a>
|
||||
% endif
|
||||
|
@ -1614,6 +1608,10 @@
|
|||
<td><strong>{streams}</strong></td>
|
||||
<td>The number of concurrent streams.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{user_streams}</strong></td>
|
||||
<td>The number of concurrent streams by the person streaming.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{user}</strong></td>
|
||||
<td>The friendly name of the person streaming.</td>
|
||||
|
@ -1968,8 +1966,40 @@
|
|||
<td>The available update download URL.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog}</strong></td>
|
||||
<td>The changelog for the available update.</td>
|
||||
<td><strong>{update_release_date}</strong></td>
|
||||
<td>The release date of the update version.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_channel}</strong></td>
|
||||
<td>The update channel. <span class="small-muted">(Public or Plex Pass)</span></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_platform}</strong></td>
|
||||
<td>The platform of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_distro}</strong></td>
|
||||
<td>The distro of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_distro_build}</strong></td>
|
||||
<td>The distro build of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_requirements}</strong></td>
|
||||
<td>The requirements for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_extra_info}</strong></td>
|
||||
<td>Any extra info for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog_added}</strong></td>
|
||||
<td>The added changelog for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog_fixed}</strong></td>
|
||||
<td>The fixed changelog for the available update.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
@ -2078,6 +2108,33 @@
|
|||
<script src="${http_root}js/Sortable.min.js"></script>
|
||||
<script src="${http_root}js/moment-with-locale.js"></script>
|
||||
<script>
|
||||
function getConfigurationTable() {
|
||||
$.ajax({
|
||||
url: 'get_configuration_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-configuration-table").html(xhr.responseText);
|
||||
if ("${kwargs.get('install_geoip')}" == 'true') {
|
||||
$('#install_geoip_db').removeClass('no-highlight').css('color','#e9a049');
|
||||
} else if ("${kwargs.get('reinstall_geoip')}" == 'true') {
|
||||
$('#reinstall_geoip_db').removeClass('no-highlight').css('color','#e9a049');
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function getSchedulerTable() {
|
||||
$.ajax({
|
||||
url: 'get_scheduler_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-scheduler-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
// Javascript to enable link to tab
|
||||
|
@ -2114,7 +2171,9 @@ $(document).ready(function() {
|
|||
$('#restart-modal').modal('show');
|
||||
}
|
||||
$("#http_hashed_password").val($("#http_hash_password").is(":checked") ? 1 : 0)
|
||||
getConfigurationTable();
|
||||
getSchedulerTable();
|
||||
loadUpdateDistros();
|
||||
settingsChanged = false;
|
||||
}
|
||||
|
||||
|
@ -2145,7 +2204,8 @@ $(document).ready(function() {
|
|||
initConfigCheckbox('#https_create_cert');
|
||||
initConfigCheckbox('#check_github');
|
||||
initConfigCheckbox('#notify_upload_posters');
|
||||
|
||||
initConfigCheckbox('#monitor_pms_updates');
|
||||
|
||||
$("#menu_link_shutdown").click(function() {
|
||||
$("#confirm-message").text("Are you sure you want to shutdown PlexPy?");
|
||||
$('#confirm-modal').modal();
|
||||
|
@ -2174,38 +2234,9 @@ $(document).ready(function() {
|
|||
window.location.href = "restart";
|
||||
});
|
||||
|
||||
function getSchedulerTable() {
|
||||
$.ajax({
|
||||
url: 'get_scheduler_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-scheduler-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
getConfigurationTable();
|
||||
getSchedulerTable();
|
||||
|
||||
function confirmAjaxCall (url, msg) {
|
||||
$("#confirm-message").text(msg);
|
||||
$('#confirm-modal').modal();
|
||||
$('#confirm-modal').one('click', '#confirm-button', function () {
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: 'POST',
|
||||
complete: function (xhr, status) {
|
||||
result = $.parseJSON(xhr.responseText);
|
||||
msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
$("#backup_config").click(function () {
|
||||
var msg = 'Are you sure you want to create a backup of the PlexPy config?';
|
||||
var url = 'backup_config';
|
||||
|
@ -2236,7 +2267,6 @@ $(document).ready(function() {
|
|||
confirmAjaxCall(url, msg);
|
||||
});
|
||||
|
||||
|
||||
$('#api_key').click(function(){ $('#api_key').select() });
|
||||
$("#generate_api").click(function() {
|
||||
$.get('generateAPI',
|
||||
|
@ -2348,6 +2378,7 @@ $(document).ready(function() {
|
|||
} else {
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Invalid username or password.');
|
||||
}
|
||||
loadUpdateDistros();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
|
@ -2406,33 +2437,26 @@ $(document).ready(function() {
|
|||
pms_logs = false;
|
||||
|
||||
// Checks to see if PMS server version is >= 0.9.14 with automatically logged IP addresses
|
||||
$.ajax({
|
||||
url: 'get_server_identity',
|
||||
async: true,
|
||||
success: function(data) {
|
||||
if (data.version){ $("#pms_version").text(data.version); }
|
||||
var version = (data.version ? data.version.split('.') : null);
|
||||
if (version && parseInt(version[0]) >= 0 && parseInt(version[1]) >= 9 && parseInt(version[2]) >= 14) {
|
||||
$("#debugLogCheck").html("IP address is automatically logged for PMS version 0.9.14 and above.");
|
||||
$("#ip_logging_enable").attr("disabled", true);
|
||||
$("#ip_logging_enable").attr("checked", true);
|
||||
pms_version = true;
|
||||
var version = "${config['pms_version']}".split('.');
|
||||
if (version && parseInt(version[0]) >= 0 && parseInt(version[1]) >= 9 && parseInt(version[2]) >= 14) {
|
||||
$("#debugLogCheck").html("IP address is automatically logged for PMS version 0.9.14 and above.");
|
||||
$("#ip_logging_enable").attr("disabled", true);
|
||||
$("#ip_logging_enable").attr("checked", true);
|
||||
pms_version = true;
|
||||
checkLogsPath();
|
||||
} else {
|
||||
// Check to see if debug logs are enabled on the PMS.
|
||||
$.ajax({
|
||||
url: 'get_server_pref',
|
||||
data: { pref: 'logDebug' },
|
||||
async: true,
|
||||
success: function(data) {
|
||||
pms_logs_debug = (data == 'true' ? true : false);
|
||||
// Check to see if our logs folder is set before allowing IP logging to be enabled.
|
||||
checkLogsPath();
|
||||
} else {
|
||||
// Check to see if debug logs are enabled on the PMS.
|
||||
$.ajax({
|
||||
url: 'get_server_pref',
|
||||
data: { pref: 'logDebug' },
|
||||
async: true,
|
||||
success: function(data) {
|
||||
pms_logs_debug = (data == 'true' ? true : false);
|
||||
// Check to see if our logs folder is set before allowing IP logging to be enabled.
|
||||
checkLogsPath();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
$("#pms_logs_folder").change(function() {
|
||||
checkLogsPath();
|
||||
|
@ -2586,16 +2610,6 @@ $(document).ready(function() {
|
|||
$('#notify_recently_added_grandparent_note').css('color', c);
|
||||
});
|
||||
|
||||
$('.guidelines-modal-link').on('click', function (e) {
|
||||
e.preventDefault();
|
||||
$('#guidelines-link').attr('href', $('#source-link').attr('href'));
|
||||
$('#guidelines-type').text($(this).data('id'))
|
||||
$('#guidelines-modal').modal();
|
||||
$('#guidelines-continue').attr('href', $(this).attr('href')).on('click', function () {
|
||||
$('#guidelines-modal').modal('hide');
|
||||
});
|
||||
});
|
||||
|
||||
function allowGuestAccessCheck () {
|
||||
if ($("#http_basic_auth").is(":checked")) {
|
||||
$("#allow_guest_access").attr("disabled", true);
|
||||
|
@ -2642,6 +2656,47 @@ $(document).ready(function() {
|
|||
$("#http_hashed_password").val($("#http_hash_password").is(":checked") ? 1 : 0);
|
||||
$("#http_hash_password_error").html("");
|
||||
});
|
||||
|
||||
// Load PMS downloads
|
||||
function loadUpdateDistros(distro_build) {
|
||||
var update_params_ajax = $.getJSON('get_server_update_params', function (data) { return data; });
|
||||
|
||||
$.when(update_params_ajax).done(function() {
|
||||
var update_params = update_params_ajax.responseJSON;
|
||||
|
||||
var plexpass = update_params.plexpass;
|
||||
var platform = update_params.pms_platform;
|
||||
var update_channel = update_params.pms_update_channel;
|
||||
var update_distro_build = update_params.pms_update_distro_build;
|
||||
|
||||
$("#pms_update_channel option[value='plexpass']").remove();
|
||||
if (plexpass) {
|
||||
var selected = (update_channel == 'plexpass') ? true : false;
|
||||
$('#pms_update_channel')
|
||||
.append($('<option></option>')
|
||||
.text('Plex Pass')
|
||||
.val('plexpass')
|
||||
.prop('selected', selected));
|
||||
}
|
||||
|
||||
$.getJSON('https://plex.tv/api/downloads/1.json?channel=' + update_channel, function (downloads) {
|
||||
platform_downloads = downloads.computer[platform] || downloads.nas[platform];
|
||||
if (platform_downloads) {
|
||||
$("#pms_update_distro_build option").remove();
|
||||
$.each(platform_downloads.releases, function (index, item) {
|
||||
var label = (platform_downloads.releases.length == 1) ? platform_downloads.name : platform_downloads.name + ' - ' + item.label;
|
||||
var selected = (item.build == update_distro_build) ? true : false;
|
||||
$('#pms_update_distro_build')
|
||||
.append($('<option></option>')
|
||||
.text(label)
|
||||
.val(item.build)
|
||||
.prop('selected', selected));
|
||||
})
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
loadUpdateDistros();
|
||||
});
|
||||
</script>
|
||||
</%def>
|
||||
|
|
|
@ -53,7 +53,9 @@ DOCUMENTATION :: END
|
|||
<div class="modal-body">
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<h4><strong>Stream Details</strong></h4>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Stream Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-4">
|
||||
<h5>Media</h5>
|
||||
<ul class="list-unstyled">
|
||||
|
@ -95,7 +97,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<h4><strong>Source Details</strong></h4>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Source Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-4">
|
||||
<h5>Media</h5>
|
||||
<ul class="list-unstyled">
|
||||
|
|
|
@ -49,7 +49,9 @@ DOCUMENTATION :: END
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
|
7
lib/geoip2/__init__.py
Normal file
|
@ -0,0 +1,7 @@
# pylint:disable=C0111

__title__ = 'geoip2'
__version__ = '2.4.0'
__author__ = 'Gregory Oschwald'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright (c) 2013-2016 Maxmind, Inc.'

17
lib/geoip2/compat.py
Normal file
|
@ -0,0 +1,17 @@
"""Intended for internal use only."""
import sys

import ipaddress

# pylint: skip-file

if sys.version_info[0] == 2:
    def compat_ip_address(address):
        """Intended for internal use only."""
        if isinstance(address, bytes):
            address = address.decode()
        return ipaddress.ip_address(address)
else:
    def compat_ip_address(address):
        """Intended for internal use only."""
        return ipaddress.ip_address(address)

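The shim above only normalizes the input type before delegating to `ipaddress.ip_address`, so callers can pass byte strings on Python 2 and text on either version. A tiny illustrative sketch (the sample address is arbitrary):

```
# Sketch only -- not part of this changeset. The same call works on Python 2 and 3.
from geoip2.compat import compat_ip_address

addr = compat_ip_address(u'172.16.0.1')
print(addr.is_private)   # True -- 172.16.0.0/12 is a private range
```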
199
lib/geoip2/database.py
Normal file
|
@ -0,0 +1,199 @@
|
|||
"""
|
||||
======================
|
||||
GeoIP2 Database Reader
|
||||
======================
|
||||
|
||||
"""
|
||||
import inspect
|
||||
|
||||
import maxminddb
|
||||
# pylint: disable=unused-import
|
||||
from maxminddb import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
|
||||
MODE_MEMORY)
|
||||
|
||||
import geoip2
|
||||
import geoip2.models
|
||||
import geoip2.errors
|
||||
|
||||
|
||||
class Reader(object):
|
||||
"""GeoIP2 database Reader object.
|
||||
|
||||
Instances of this class provide a reader for the GeoIP2 database format.
|
||||
IP addresses can be looked up using the ``country`` and ``city`` methods.
|
||||
|
||||
The basic API for this class is the same for every database. First, you
|
||||
create a reader object, specifying a file name. You then call the method
|
||||
corresponding to the specific database, passing it the IP address you want
|
||||
to look up.
|
||||
|
||||
If the request succeeds, the method call will return a model class for the
|
||||
method you called. This model in turn contains multiple record classes,
|
||||
each of which represents part of the data returned by the database. If the
|
||||
database does not contain the requested information, the attributes on the
|
||||
record class will have a ``None`` value.
|
||||
|
||||
If the address is not in the database, an
|
||||
``geoip2.errors.AddressNotFoundError`` exception will be thrown. If the
|
||||
database is corrupt or invalid, a ``maxminddb.InvalidDatabaseError`` will
|
||||
be thrown.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, filename, locales=None, mode=MODE_AUTO):
|
||||
"""Create GeoIP2 Reader.
|
||||
|
||||
:param filename: The path to the GeoIP2 database.
|
||||
:param locales: This is list of locale codes. This argument will be
|
||||
passed on to record classes to use when their name properties are
|
||||
called. The default value is ['en'].
|
||||
|
||||
The order of the locales is significant. When a record class has
|
||||
multiple names (country, city, etc.), its name property will return
|
||||
the name in the first locale that has one.
|
||||
|
||||
Note that the only locale which is always present in the GeoIP2
|
||||
data is "en". If you do not include this locale, the name property
|
||||
may end up returning None even when the record has an English name.
|
||||
|
||||
Currently, the valid locale codes are:
|
||||
|
||||
* de -- German
|
||||
* en -- English names may still include accented characters if that
|
||||
is the accepted spelling in English. In other words, English does
|
||||
not mean ASCII.
|
||||
* es -- Spanish
|
||||
* fr -- French
|
||||
* ja -- Japanese
|
||||
* pt-BR -- Brazilian Portuguese
|
||||
* ru -- Russian
|
||||
* zh-CN -- Simplified Chinese.
|
||||
:param mode: The mode to open the database with. Valid mode are:
|
||||
* MODE_MMAP_EXT - use the C extension with memory map.
|
||||
* MODE_MMAP - read from memory map. Pure Python.
|
||||
* MODE_FILE - read database as standard file. Pure Python.
|
||||
* MODE_MEMORY - load database into memory. Pure Python.
|
||||
* MODE_AUTO - try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order.
|
||||
Default.
|
||||
|
||||
"""
|
||||
if locales is None:
|
||||
locales = ['en']
|
||||
self._db_reader = maxminddb.open_database(filename, mode)
|
||||
self._locales = locales
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
def country(self, ip_address):
|
||||
"""Get the Country object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Country` object
|
||||
|
||||
"""
|
||||
|
||||
return self._model_for(geoip2.models.Country, 'Country', ip_address)
|
||||
|
||||
def city(self, ip_address):
|
||||
"""Get the City object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.City` object
|
||||
|
||||
"""
|
||||
return self._model_for(geoip2.models.City, 'City', ip_address)
|
||||
|
||||
def anonymous_ip(self, ip_address):
|
||||
"""Get the AnonymousIP object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.AnonymousIP` object
|
||||
|
||||
"""
|
||||
return self._flat_model_for(geoip2.models.AnonymousIP,
|
||||
'GeoIP2-Anonymous-IP', ip_address)
|
||||
|
||||
def connection_type(self, ip_address):
|
||||
"""Get the ConnectionType object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.ConnectionType` object
|
||||
|
||||
"""
|
||||
return self._flat_model_for(geoip2.models.ConnectionType,
|
||||
'GeoIP2-Connection-Type', ip_address)
|
||||
|
||||
def domain(self, ip_address):
|
||||
"""Get the Domain object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Domain` object
|
||||
|
||||
"""
|
||||
return self._flat_model_for(geoip2.models.Domain, 'GeoIP2-Domain',
|
||||
ip_address)
|
||||
|
||||
def enterprise(self, ip_address):
|
||||
"""Get the Enterprise object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Enterprise` object
|
||||
|
||||
"""
|
||||
return self._model_for(geoip2.models.Enterprise, 'Enterprise',
|
||||
ip_address)
|
||||
|
||||
def isp(self, ip_address):
|
||||
"""Get the ISP object for the IP address.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string.
|
||||
|
||||
:returns: :py:class:`geoip2.models.ISP` object
|
||||
|
||||
"""
|
||||
return self._flat_model_for(geoip2.models.ISP, 'GeoIP2-ISP',
|
||||
ip_address)
|
||||
|
||||
def _get(self, database_type, ip_address):
|
||||
if database_type not in self.metadata().database_type:
|
||||
caller = inspect.stack()[2][3]
|
||||
raise TypeError("The %s method cannot be used with the "
|
||||
"%s database" %
|
||||
(caller, self.metadata().database_type))
|
||||
record = self._db_reader.get(ip_address)
|
||||
if record is None:
|
||||
raise geoip2.errors.AddressNotFoundError(
|
||||
"The address %s is not in the database." % ip_address)
|
||||
return record
|
||||
|
||||
def _model_for(self, model_class, types, ip_address):
|
||||
record = self._get(types, ip_address)
|
||||
record.setdefault('traits', {})['ip_address'] = ip_address
|
||||
return model_class(record, locales=self._locales)
|
||||
|
||||
def _flat_model_for(self, model_class, types, ip_address):
|
||||
record = self._get(types, ip_address)
|
||||
record['ip_address'] = ip_address
|
||||
return model_class(record)
|
||||
|
||||
def metadata(self):
|
||||
"""The metadata for the open database.
|
||||
|
||||
:returns: :py:class:`maxminddb.reader.Metadata` object
|
||||
"""
|
||||
return self._db_reader.metadata()
|
||||
|
||||
def close(self):
|
||||
"""Closes the GeoIP2 database."""
|
||||
|
||||
self._db_reader.close()
|
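The Reader docstring above describes the intended usage: open the database file once, then call the lookup method that matches the database type. A minimal sketch, assuming the GeoLite2 City database has already been downloaded; the filename `GeoLite2-City.mmdb` and the address `8.8.8.8` are purely illustrative:

```
# Sketch only -- not part of this changeset. Assumes geoip2 and maxminddb
# (bundled under lib/) are importable and that the .mmdb file exists.
import geoip2.database

with geoip2.database.Reader('GeoLite2-City.mmdb') as reader:
    response = reader.city('8.8.8.8')       # geoip2.models.City instance
    print(response.country.name)            # country record for the address
    print(response.city.name)               # may be None if the database has no city data
    print(response.location.latitude, response.location.longitude)
```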
51
lib/geoip2/errors.py
Normal file
|
@ -0,0 +1,51 @@
"""
Errors
======

"""


class GeoIP2Error(RuntimeError):
    """There was a generic error in GeoIP2.

    This class represents a generic error. It extends :py:exc:`RuntimeError`
    and does not add any additional attributes.

    """


class AddressNotFoundError(GeoIP2Error):
    """The address you were looking up was not found."""


class AuthenticationError(GeoIP2Error):
    """There was a problem authenticating the request."""


class HTTPError(GeoIP2Error):
    """There was an error when making your HTTP request.

    This class represents an HTTP transport error. It extends
    :py:exc:`GeoIP2Error` and adds attributes of its own.

    :ivar http_status: The HTTP status code returned
    :ivar uri: The URI queried

    """

    def __init__(self, message, http_status=None, uri=None):
        super(HTTPError, self).__init__(message)
        self.http_status = http_status
        self.uri = uri


class InvalidRequestError(GeoIP2Error):
    """The request was invalid."""


class OutOfQueriesError(GeoIP2Error):
    """Your account is out of funds for the service queried."""


class PermissionRequiredError(GeoIP2Error):
    """Your account does not have permission to access this service."""

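As the Reader docstring notes, an address that is not present in the database raises `AddressNotFoundError`, which is the normal case for private and reserved addresses. A small hedged sketch of guarding a lookup; the helper name `lookup_city` is illustrative only:

```
# Sketch only: degrade gracefully when an address is not in the database.
from geoip2.errors import AddressNotFoundError

def lookup_city(reader, ip_address):
    """Return the City model, or None if the address is not in the database."""
    try:
        return reader.city(ip_address)
    except AddressNotFoundError:
        # e.g. 127.0.0.1 and other private/reserved addresses
        return None
```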
16
lib/geoip2/mixins.py
Normal file
|
@ -0,0 +1,16 @@
"""This package contains utility mixins"""
# pylint: disable=too-few-public-methods
from abc import ABCMeta


class SimpleEquality(object):
    """Naive __dict__ equality mixin"""

    __metaclass__ = ABCMeta

    def __eq__(self, other):
        return (isinstance(other, self.__class__) and
                self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return not self.__eq__(other)

472
lib/geoip2/models.py
Normal file
|
@ -0,0 +1,472 @@
|
|||
"""
|
||||
Models
|
||||
======
|
||||
|
||||
These classes provide models for the data returned by the GeoIP2
|
||||
web service and databases.
|
||||
|
||||
The only difference between the City and Insights model classes is which
|
||||
fields in each record may be populated. See
|
||||
http://dev.maxmind.com/geoip/geoip2/web-services for more details.
|
||||
|
||||
"""
|
||||
# pylint: disable=too-many-instance-attributes,too-few-public-methods
|
||||
from abc import ABCMeta
|
||||
|
||||
import geoip2.records
|
||||
from geoip2.mixins import SimpleEquality
|
||||
|
||||
|
||||
class Country(SimpleEquality):
|
||||
"""Model for the GeoIP2 Precision: Country and the GeoIP2 Country database.
|
||||
|
||||
This class provides the following attributes:
|
||||
|
||||
.. attribute:: continent
|
||||
|
||||
Continent object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Continent`
|
||||
|
||||
.. attribute:: country
|
||||
|
||||
Country object for the requested IP address. This record represents the
|
||||
country where MaxMind believes the IP is located.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: maxmind
|
||||
|
||||
Information related to your MaxMind account.
|
||||
|
||||
:type: :py:class:`geoip2.records.MaxMind`
|
||||
|
||||
.. attribute:: registered_country
|
||||
|
||||
The registered country object for the requested IP address. This record
|
||||
represents the country where the ISP has registered a given IP block in
|
||||
and may differ from the user's country.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: represented_country
|
||||
|
||||
Object for the country represented by the users of the IP address
|
||||
when that country is different than the country in ``country``. For
|
||||
instance, the country represented by an overseas military base.
|
||||
|
||||
:type: :py:class:`geoip2.records.RepresentedCountry`
|
||||
|
||||
.. attribute:: traits
|
||||
|
||||
Object with the traits of the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Traits`
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, raw_response, locales=None):
|
||||
if locales is None:
|
||||
locales = ['en']
|
||||
self._locales = locales
|
||||
self.continent = \
|
||||
geoip2.records.Continent(locales,
|
||||
**raw_response.get('continent', {}))
|
||||
self.country = \
|
||||
geoip2.records.Country(locales,
|
||||
**raw_response.get('country', {}))
|
||||
self.registered_country = \
|
||||
geoip2.records.Country(locales,
|
||||
**raw_response.get('registered_country',
|
||||
{}))
|
||||
self.represented_country \
|
||||
= geoip2.records.RepresentedCountry(locales,
|
||||
**raw_response.get(
|
||||
'represented_country', {}))
|
||||
|
||||
self.maxmind = \
|
||||
geoip2.records.MaxMind(**raw_response.get('maxmind', {}))
|
||||
|
||||
self.traits = geoip2.records.Traits(**raw_response.get('traits', {}))
|
||||
self.raw = raw_response
|
||||
|
||||
def __repr__(self):
|
||||
return '{module}.{class_name}({data}, {locales})'.format(
|
||||
module=self.__module__,
|
||||
class_name=self.__class__.__name__,
|
||||
data=self.raw,
|
||||
locales=self._locales)
|
||||
|
||||
|
||||
class City(Country):
|
||||
"""Model for the GeoIP2 Precision: City and the GeoIP2 City database.
|
||||
|
||||
.. attribute:: city
|
||||
|
||||
City object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.City`
|
||||
|
||||
.. attribute:: continent
|
||||
|
||||
Continent object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Continent`
|
||||
|
||||
.. attribute:: country
|
||||
|
||||
Country object for the requested IP address. This record represents the
|
||||
country where MaxMind believes the IP is located.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: location
|
||||
|
||||
Location object for the requested IP address.
|
||||
|
||||
.. attribute:: maxmind
|
||||
|
||||
Information related to your MaxMind account.
|
||||
|
||||
:type: :py:class:`geoip2.records.MaxMind`
|
||||
|
||||
.. attribute:: registered_country
|
||||
|
||||
The registered country object for the requested IP address. This record
|
||||
represents the country where the ISP has registered a given IP block in
|
||||
and may differ from the user's country.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: represented_country
|
||||
|
||||
Object for the country represented by the users of the IP address
|
||||
when that country is different than the country in ``country``. For
|
||||
instance, the country represented by an overseas military base.
|
||||
|
||||
:type: :py:class:`geoip2.records.RepresentedCountry`
|
||||
|
||||
.. attribute:: subdivisions
|
||||
|
||||
Object (tuple) representing the subdivisions of the country to which
|
||||
the location of the requested IP address belongs.
|
||||
|
||||
:type: :py:class:`geoip2.records.Subdivisions`
|
||||
|
||||
.. attribute:: traits
|
||||
|
||||
Object with the traits of the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Traits`
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, raw_response, locales=None):
|
||||
super(City, self).__init__(raw_response, locales)
|
||||
self.city = \
|
||||
geoip2.records.City(locales, **raw_response.get('city', {}))
|
||||
self.location = \
|
||||
geoip2.records.Location(**raw_response.get('location', {}))
|
||||
self.postal = \
|
||||
geoip2.records.Postal(**raw_response.get('postal', {}))
|
||||
self.subdivisions = \
|
||||
geoip2.records.Subdivisions(locales,
|
||||
*raw_response.get('subdivisions', []))
|
||||
|
||||
|
||||
class Insights(City):
|
||||
"""Model for the GeoIP2 Precision: Insights web service endpoint.
|
||||
|
||||
.. attribute:: city
|
||||
|
||||
City object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.City`
|
||||
|
||||
.. attribute:: continent
|
||||
|
||||
Continent object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Continent`
|
||||
|
||||
.. attribute:: country
|
||||
|
||||
Country object for the requested IP address. This record represents the
|
||||
country where MaxMind believes the IP is located.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: location
|
||||
|
||||
Location object for the requested IP address.
|
||||
|
||||
.. attribute:: maxmind
|
||||
|
||||
Information related to your MaxMind account.
|
||||
|
||||
:type: :py:class:`geoip2.records.MaxMind`
|
||||
|
||||
.. attribute:: registered_country
|
||||
|
||||
The registered country object for the requested IP address. This record
|
||||
represents the country where the ISP has registered a given IP block in
|
||||
and may differ from the user's country.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: represented_country
|
||||
|
||||
Object for the country represented by the users of the IP address
|
||||
when that country is different than the country in ``country``. For
|
||||
instance, the country represented by an overseas military base.
|
||||
|
||||
:type: :py:class:`geoip2.records.RepresentedCountry`
|
||||
|
||||
.. attribute:: subdivisions
|
||||
|
||||
Object (tuple) representing the subdivisions of the country to which
|
||||
the location of the requested IP address belongs.
|
||||
|
||||
:type: :py:class:`geoip2.records.Subdivisions`
|
||||
|
||||
.. attribute:: traits
|
||||
|
||||
Object with the traits of the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Traits`
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class Enterprise(City):
|
||||
"""Model for the GeoIP2 Enterprise database.
|
||||
|
||||
.. attribute:: city
|
||||
|
||||
City object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.City`
|
||||
|
||||
.. attribute:: continent
|
||||
|
||||
Continent object for the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Continent`
|
||||
|
||||
.. attribute:: country
|
||||
|
||||
Country object for the requested IP address. This record represents the
|
||||
country where MaxMind believes the IP is located.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: location
|
||||
|
||||
Location object for the requested IP address.
|
||||
|
||||
.. attribute:: maxmind
|
||||
|
||||
Information related to your MaxMind account.
|
||||
|
||||
:type: :py:class:`geoip2.records.MaxMind`
|
||||
|
||||
.. attribute:: registered_country
|
||||
|
||||
The registered country object for the requested IP address. This record
|
||||
represents the country where the ISP has registered a given IP block in
|
||||
and may differ from the user's country.
|
||||
|
||||
:type: :py:class:`geoip2.records.Country`
|
||||
|
||||
.. attribute:: represented_country
|
||||
|
||||
Object for the country represented by the users of the IP address
|
||||
when that country is different than the country in ``country``. For
|
||||
instance, the country represented by an overseas military base.
|
||||
|
||||
:type: :py:class:`geoip2.records.RepresentedCountry`
|
||||
|
||||
.. attribute:: subdivisions
|
||||
|
||||
Object (tuple) representing the subdivisions of the country to which
|
||||
the location of the requested IP address belongs.
|
||||
|
||||
:type: :py:class:`geoip2.records.Subdivisions`
|
||||
|
||||
.. attribute:: traits
|
||||
|
||||
Object with the traits of the requested IP address.
|
||||
|
||||
:type: :py:class:`geoip2.records.Traits`
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class SimpleModel(SimpleEquality):
|
||||
"""Provides basic methods for non-location models"""
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
def __repr__(self):
|
||||
# pylint: disable=no-member
|
||||
return '{module}.{class_name}({data})'.format(
|
||||
module=self.__module__,
|
||||
class_name=self.__class__.__name__,
|
||||
data=str(self.raw))
|
||||
|
||||
|
||||
class AnonymousIP(SimpleModel):
|
||||
"""Model class for the GeoIP2 Anonymous IP.
|
||||
|
||||
This class provides the following attribute:
|
||||
|
||||
.. attribute:: is_anonymous
|
||||
|
||||
This is true if the IP address belongs to any sort of anonymous network.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_anonymous_vpn
|
||||
|
||||
This is true if the IP address belongs to an anonymous VPN system.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_hosting_provider
|
||||
|
||||
This is true if the IP address belongs to a hosting provider.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_public_proxy
|
||||
|
||||
This is true if the IP address belongs to a public proxy.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_tor_exit_node
|
||||
|
||||
This is true if the IP address is a Tor exit node.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address used in the lookup.
|
||||
|
||||
:type: unicode
|
||||
"""
|
||||
|
||||
def __init__(self, raw):
|
||||
self.is_anonymous = raw.get('is_anonymous', False)
|
||||
self.is_anonymous_vpn = raw.get('is_anonymous_vpn', False)
|
||||
self.is_hosting_provider = raw.get('is_hosting_provider', False)
|
||||
self.is_public_proxy = raw.get('is_public_proxy', False)
|
||||
self.is_tor_exit_node = raw.get('is_tor_exit_node', False)
|
||||
|
||||
self.ip_address = raw.get('ip_address')
|
||||
self.raw = raw
|
||||
|
||||
|
||||
class ConnectionType(SimpleModel):
|
||||
"""Model class for the GeoIP2 Connection-Type.
|
||||
|
||||
This class provides the following attribute:
|
||||
|
||||
.. attribute:: connection_type
|
||||
|
||||
The connection type may take the following values:
|
||||
|
||||
- Dialup
|
||||
- Cable/DSL
|
||||
- Corporate
|
||||
- Cellular
|
||||
|
||||
Additional values may be added in the future.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address used in the lookup.
|
||||
|
||||
:type: unicode
|
||||
"""
|
||||
|
||||
def __init__(self, raw):
|
||||
self.connection_type = raw.get('connection_type')
|
||||
self.ip_address = raw.get('ip_address')
|
||||
self.raw = raw
|
||||
|
||||
|
||||
class Domain(SimpleModel):
|
||||
"""Model class for the GeoIP2 Domain.
|
||||
|
||||
This class provides the following attribute:
|
||||
|
||||
.. attribute:: domain
|
||||
|
||||
The domain associated with the IP address.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address used in the lookup.
|
||||
|
||||
:type: unicode
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, raw):
|
||||
self.domain = raw.get('domain')
|
||||
self.ip_address = raw.get('ip_address')
|
||||
self.raw = raw
|
||||
|
||||
|
||||
class ISP(SimpleModel):
|
||||
"""Model class for the GeoIP2 ISP.
|
||||
|
||||
This class provides the following attribute:
|
||||
|
||||
.. attribute:: autonomous_system_number
|
||||
|
||||
The autonomous system number associated with the IP address.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: autonomous_system_organization
|
||||
|
||||
The organization associated with the registered autonomous system number
|
||||
for the IP address.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: isp
|
||||
|
||||
The name of the ISP associated with the IP address.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: organization
|
||||
|
||||
The name of the organization associated with the IP address.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address used in the lookup.
|
||||
|
||||
:type: unicode
|
||||
"""
|
||||
|
||||
# pylint:disable=too-many-arguments
|
||||
def __init__(self, raw):
|
||||
self.autonomous_system_number = raw.get('autonomous_system_number')
|
||||
self.autonomous_system_organization = raw.get(
|
||||
'autonomous_system_organization')
|
||||
self.isp = raw.get('isp')
|
||||
self.organization = raw.get('organization')
|
||||
self.ip_address = raw.get('ip_address')
|
||||
self.raw = raw
|
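The models above are normally obtained from the package's database reader rather than constructed by hand. A minimal usage sketch, assuming the bundled `geoip2.database.Reader` and a locally installed GeoLite2-City.mmdb (the path and IP address below are illustrative):

```
import geoip2.database

reader = geoip2.database.Reader('/path/to/GeoLite2-City.mmdb')
response = reader.city('128.101.101.101')   # returns a City model

print(response.country.iso_code)                  # e.g. 'US'
print(response.subdivisions.most_specific.name)   # most specific subdivision
print(response.location.latitude, response.location.longitude)

reader.close()
```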
605  lib/geoip2/records.py  Normal file
@@ -0,0 +1,605 @@
"""
|
||||
|
||||
Records
|
||||
=======
|
||||
|
||||
"""
|
||||
|
||||
# pylint:disable=R0903
|
||||
from abc import ABCMeta
|
||||
|
||||
from geoip2.mixins import SimpleEquality
|
||||
|
||||
|
||||
class Record(SimpleEquality):
|
||||
"""All records are subclasses of the abstract class ``Record``."""
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
_valid_attributes = set()
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
valid_args = dict((k, kwargs.get(k)) for k in self._valid_attributes)
|
||||
self.__dict__.update(valid_args)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
raise AttributeError("can't set attribute")
|
||||
|
||||
def __repr__(self):
|
||||
args = ', '.join('%s=%r' % x for x in self.__dict__.items())
|
||||
return '{module}.{class_name}({data})'.format(
|
||||
module=self.__module__,
|
||||
class_name=self.__class__.__name__,
|
||||
data=args)
|
||||
|
||||
|
||||
class PlaceRecord(Record):
|
||||
"""All records with :py:attr:`names` subclass :py:class:`PlaceRecord`."""
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
def __init__(self, locales=None, **kwargs):
|
||||
if locales is None:
|
||||
locales = ['en']
|
||||
if kwargs.get('names') is None:
|
||||
kwargs['names'] = {}
|
||||
object.__setattr__(self, '_locales', locales)
|
||||
super(PlaceRecord, self).__init__(**kwargs)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Dict with locale codes as keys and localized name as value."""
|
||||
# pylint:disable=E1101
|
||||
return next(
|
||||
(self.names.get(x) for x in self._locales
|
||||
if x in self.names), None)
|
||||
|
||||
|
||||
class City(PlaceRecord):
|
||||
"""Contains data for the city record associated with an IP address.
|
||||
|
||||
This class contains the city-level data associated with an IP address.
|
||||
|
||||
This record is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
A value from 0-100 indicating MaxMind's
|
||||
confidence that the city is correct. This attribute is only available
|
||||
from the Insights end point and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
The GeoName ID for the city.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
The name of the city based on the locales list passed to the
|
||||
constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes
|
||||
and the values are names.
|
||||
|
||||
:type: dict
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['confidence', 'geoname_id', 'names'])
|
||||
|
||||
|
||||
class Continent(PlaceRecord):
|
||||
"""Contains data for the continent record associated with an IP address.
|
||||
|
||||
This class contains the continent-level data associated with an IP
|
||||
address.
|
||||
|
||||
Attributes:
|
||||
|
||||
|
||||
.. attribute:: code
|
||||
|
||||
A two character continent code like "NA" (North America)
|
||||
or "OC" (Oceania).
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
The GeoName ID for the continent.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
Returns the name of the continent based on the locales list passed to
|
||||
the constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes
|
||||
and the values are names.
|
||||
|
||||
:type: dict
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['code', 'geoname_id', 'names'])
|
||||
|
||||
|
||||
class Country(PlaceRecord):
|
||||
"""Contains data for the country record associated with an IP address.
|
||||
|
||||
This class contains the country-level data associated with an IP address.
|
||||
|
||||
Attributes:
|
||||
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
A value from 0-100 indicating MaxMind's confidence that
|
||||
the country is correct. This attribute is only available from the
|
||||
Insights end point and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
The GeoName ID for the country.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: iso_code
|
||||
|
||||
The two-character `ISO 3166-1
|
||||
<http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the
|
||||
country.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
The name of the country based on the locales list passed to the
|
||||
constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes and the values
|
||||
are names.
|
||||
|
||||
:type: dict
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
|
||||
|
||||
|
||||
class RepresentedCountry(Country):
|
||||
"""Contains data for the represented country associated with an IP address.
|
||||
|
||||
This class contains the country-level data associated with an IP address
|
||||
for the IP's represented country. The represented country is the country
|
||||
represented by something like a military base.
|
||||
|
||||
Attributes:
|
||||
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
A value from 0-100 indicating MaxMind's confidence that
|
||||
the country is correct. This attribute is only available from the
|
||||
Insights end point and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
The GeoName ID for the country.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: iso_code
|
||||
|
||||
The two-character `ISO 3166-1
|
||||
<http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the country.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
The name of the country based on the locales list passed to the
|
||||
constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes and the values
|
||||
are names.
|
||||
|
||||
:type: dict
|
||||
|
||||
|
||||
.. attribute:: type
|
||||
|
||||
A string indicating the type of entity that is representing the
|
||||
country. Currently we only return ``military`` but this could expand to
|
||||
include other types in the future.
|
||||
|
||||
:type: unicode
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names',
|
||||
'type'])
|
||||
|
||||
|
||||
class Location(Record):
|
||||
"""Contains data for the location record associated with an IP address.
|
||||
|
||||
This class contains the location data associated with an IP address.
|
||||
|
||||
This record is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: average_income
|
||||
|
||||
The average income in US dollars associated with the requested IP
|
||||
address. This attribute is only available from the Insights end point.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: accuracy_radius
|
||||
|
||||
The radius in kilometers around the specified location where the IP
|
||||
address is likely to be.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: latitude
|
||||
|
||||
The approximate latitude of the location associated with the IP
|
||||
address. This value is not precise and should not be used to identify a
|
||||
particular address or household.
|
||||
|
||||
:type: float
|
||||
|
||||
.. attribute:: longitude
|
||||
|
||||
The approximate longitude of the location associated with the IP
|
||||
address. This value is not precise and should not be used to identify a
|
||||
particular address or household.
|
||||
|
||||
:type: float
|
||||
|
||||
.. attribute:: metro_code
|
||||
|
||||
The metro code of the location if the
|
||||
location is in the US. MaxMind returns the same metro codes as the
|
||||
`Google AdWords API
|
||||
<https://developers.google.com/adwords/api/docs/appendix/cities-DMAregions>`_.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: population_density
|
||||
|
||||
The estimated population per square kilometer associated with the IP
|
||||
address. This attribute is only available from the Insights end point.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: time_zone
|
||||
|
||||
The time zone associated with location, as specified by the `IANA Time
|
||||
Zone Database <http://www.iana.org/time-zones>`_, e.g.,
|
||||
"America/New_York".
|
||||
|
||||
:type: unicode
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['average_income', 'accuracy_radius', 'latitude',
|
||||
'longitude', 'metro_code', 'population_density',
|
||||
'postal_code', 'postal_confidence', 'time_zone'])
|
||||
|
||||
|
||||
class MaxMind(Record):
|
||||
"""Contains data related to your MaxMind account.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: queries_remaining
|
||||
|
||||
The number of remaining queries you have
|
||||
for the end point you are calling.
|
||||
|
||||
:type: int
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['queries_remaining'])
|
||||
|
||||
|
||||
class Postal(Record):
|
||||
"""Contains data for the postal record associated with an IP address.
|
||||
|
||||
This class contains the postal data associated with an IP address.
|
||||
|
||||
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: code
|
||||
|
||||
The postal code of the location. Postal
|
||||
codes are not available for all countries. In some countries, this will
|
||||
only contain part of the postal code.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
A value from 0-100 indicating
|
||||
MaxMind's confidence that the postal code is correct. This attribute is
|
||||
only available from the Insights end point and the GeoIP2 Enterprise
|
||||
database.
|
||||
|
||||
:type: int
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['code', 'confidence'])
|
||||
|
||||
|
||||
class Subdivision(PlaceRecord):
|
||||
"""Contains data for the subdivisions associated with an IP address.
|
||||
|
||||
This class contains the subdivision data associated with an IP address.
|
||||
|
||||
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
|
||||
Attributes:
|
||||
|
||||
.. attribute:: confidence
|
||||
|
||||
This is a value from 0-100 indicating MaxMind's
|
||||
confidence that the subdivision is correct. This attribute is only
|
||||
available from the Insights end point and the GeoIP2 Enterprise
|
||||
database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: geoname_id
|
||||
|
||||
This is a GeoName ID for the subdivision.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: iso_code
|
||||
|
||||
This is a string up to three characters long
|
||||
containing the subdivision portion of the `ISO 3166-2 code
|
||||
<http://en.wikipedia.org/wiki/ISO_3166-2>`_.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: name
|
||||
|
||||
The name of the subdivision based on the locales list passed to the
|
||||
constructor.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: names
|
||||
|
||||
A dictionary where the keys are locale codes and the
|
||||
values are names
|
||||
|
||||
:type: dict
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
|
||||
|
||||
|
||||
class Subdivisions(tuple):
|
||||
"""A tuple-like collection of subdivisions associated with an IP address.
|
||||
|
||||
This class contains the subdivisions of the country associated with the
|
||||
IP address from largest to smallest.
|
||||
|
||||
For instance, the response for Oxford in the United Kingdom would have
|
||||
England as the first element and Oxfordshire as the second element.
|
||||
|
||||
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
|
||||
"""
|
||||
|
||||
def __new__(cls, locales, *subdivisions):
|
||||
subdivisions = [Subdivision(locales, **x) for x in subdivisions]
|
||||
obj = super(cls, Subdivisions).__new__(cls, subdivisions)
|
||||
return obj
|
||||
|
||||
def __init__(self, locales, *subdivisions): # pylint:disable=W0613
|
||||
self._locales = locales
|
||||
super(Subdivisions, self).__init__()
|
||||
|
||||
@property
|
||||
def most_specific(self):
|
||||
"""The most specific (smallest) subdivision available.
|
||||
|
||||
If there are no :py:class:`Subdivision` objects for the response,
|
||||
this returns an empty :py:class:`Subdivision`.
|
||||
|
||||
:type: :py:class:`Subdivision`
|
||||
"""
|
||||
try:
|
||||
return self[-1]
|
||||
except IndexError:
|
||||
return Subdivision(self._locales)
|
||||
|
||||
|
||||
class Traits(Record):
|
||||
"""Contains data for the traits record associated with an IP address.
|
||||
|
||||
This class contains the traits data associated with an IP address.
|
||||
|
||||
This class has the following attributes:
|
||||
|
||||
|
||||
.. attribute:: autonomous_system_number
|
||||
|
||||
The `autonomous system
|
||||
number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_
|
||||
associated with the IP address. This attribute is only available from
|
||||
the City and Insights web service end points and the GeoIP2 Enterprise
|
||||
database.
|
||||
|
||||
:type: int
|
||||
|
||||
.. attribute:: autonomous_system_organization
|
||||
|
||||
The organization associated with the registered `autonomous system
|
||||
number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_ for
|
||||
the IP address. This attribute is only available from the City and
|
||||
Insights web service end points and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: connection_type
|
||||
|
||||
The connection type may take the following values:
|
||||
|
||||
- Dialup
|
||||
- Cable/DSL
|
||||
- Corporate
|
||||
- Cellular
|
||||
|
||||
Additional values may be added in the future.
|
||||
|
||||
This attribute is only available in the GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: domain
|
||||
|
||||
The second level domain associated with the
|
||||
IP address. This will be something like "example.com" or
|
||||
"example.co.uk", not "foo.example.com". This attribute is only available
|
||||
from the City and Insights web service end points and the GeoIP2
|
||||
Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: ip_address
|
||||
|
||||
The IP address that the data in the model
|
||||
is for. If you performed a "me" lookup against the web service, this
|
||||
will be the externally routable IP address for the system the code is
|
||||
running on. If the system is behind a NAT, this may differ from the IP
|
||||
address locally assigned to it.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: is_anonymous_proxy
|
||||
|
||||
This is true if the IP is an anonymous
|
||||
proxy. See http://dev.maxmind.com/faq/geoip#anonproxy for further
|
||||
details.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. deprecated:: 2.2.0
|
||||
Use our `GeoIP2 Anonymous IP database
|
||||
<https://www.maxmind.com/en/geoip2-anonymous-ip-database>`_
|
||||
instead.
|
||||
|
||||
.. attribute:: is_legitimate_proxy
|
||||
|
||||
This attribute is true if MaxMind believes this IP address to be a
|
||||
legitimate proxy, such as an internal VPN used by a corporation. This
|
||||
attribute is only available in the GeoIP2 Enterprise database.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. attribute:: is_satellite_provider
|
||||
|
||||
This is true if the IP address is from a satellite provider that
|
||||
provides service to multiple countries.
|
||||
|
||||
:type: bool
|
||||
|
||||
.. deprecated:: 2.2.0
|
||||
Due to the increased coverage by mobile carriers, very few
|
||||
satellite providers now serve multiple countries. As a result, the
|
||||
output does not provide sufficiently relevant data for us to maintain
|
||||
it.
|
||||
|
||||
.. attribute:: isp
|
||||
|
||||
The name of the ISP associated with the IP address. This attribute is
|
||||
only available from the City and Insights web service end points and the
|
||||
GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: organization
|
||||
|
||||
The name of the organization associated with the IP address. This
|
||||
attribute is only available from the City and Insights web service end
|
||||
points and the GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
.. attribute:: user_type
|
||||
|
||||
The user type associated with the IP
|
||||
address. This can be one of the following values:
|
||||
|
||||
* business
|
||||
* cafe
|
||||
* cellular
|
||||
* college
|
||||
* content_delivery_network
|
||||
* dialup
|
||||
* government
|
||||
* hosting
|
||||
* library
|
||||
* military
|
||||
* residential
|
||||
* router
|
||||
* school
|
||||
* search_engine_spider
|
||||
* traveler
|
||||
|
||||
This attribute is only available from the Insights end point and the
|
||||
GeoIP2 Enterprise database.
|
||||
|
||||
:type: unicode
|
||||
|
||||
"""
|
||||
|
||||
_valid_attributes = set(
|
||||
['autonomous_system_number', 'autonomous_system_organization',
|
||||
'connection_type', 'domain', 'is_anonymous_proxy',
|
||||
'is_legitimate_proxy', 'is_satellite_provider', 'isp', 'ip_address',
|
||||
'organization', 'user_type'])
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
for k in ['is_anonymous_proxy', 'is_legitimate_proxy',
|
||||
'is_satellite_provider']:
|
||||
kwargs[k] = bool(kwargs.get(k, False))
|
||||
super(Traits, self).__init__(**kwargs)
|
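A sketch of how the record classes above behave once a locales list is supplied; it assumes the same bundled reader, and the path and address are illustrative only:

```
import geoip2.database

reader = geoip2.database.Reader('/path/to/GeoLite2-City.mmdb',
                                locales=['de', 'en'])
response = reader.city('81.2.69.160')

# PlaceRecord.name returns the name in the first listed locale that has one.
print(response.country.names)   # dict of locale code -> localized name
print(response.country.name)    # German name if present, otherwise English

# Subdivisions are ordered from largest to smallest.
print(response.subdivisions.most_specific.iso_code)

reader.close()
```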
219  lib/geoip2/webservice.py  Normal file
@@ -0,0 +1,219 @@
"""
|
||||
============================
|
||||
WebServices Client API
|
||||
============================
|
||||
|
||||
This class provides a client API for all the GeoIP2 Precision web service end
|
||||
points. The end points are Country, City, and Insights. Each end point returns
|
||||
a different set of data about an IP address, with Country returning the least
|
||||
data and Insights the most.
|
||||
|
||||
Each web service end point is represented by a different model class, and
|
||||
these model classes in turn contain multiple record classes. The record
|
||||
classes have attributes which contain data about the IP address.
|
||||
|
||||
If the web service does not return a particular piece of data for an IP
|
||||
address, the associated attribute is not populated.
|
||||
|
||||
The web service may not return any information for an entire record, in which
|
||||
case all of the attributes for that record class will be empty.
|
||||
|
||||
SSL
|
||||
---
|
||||
|
||||
Requests to the GeoIP2 Precision web service are always made with SSL.
|
||||
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
from requests.utils import default_user_agent
|
||||
|
||||
import geoip2
|
||||
import geoip2.models
|
||||
|
||||
from .compat import compat_ip_address
|
||||
|
||||
from .errors import (AddressNotFoundError, AuthenticationError, GeoIP2Error,
|
||||
HTTPError, InvalidRequestError, OutOfQueriesError,
|
||||
PermissionRequiredError)
|
||||
|
||||
|
||||
class Client(object):
|
||||
"""Creates a new client object.
|
||||
|
||||
It accepts the following required arguments:
|
||||
|
||||
:param user_id: Your MaxMind User ID.
|
||||
:param license_key: Your MaxMind license key.
|
||||
|
||||
Go to https://www.maxmind.com/en/my_license_key to see your MaxMind
|
||||
User ID and license key.
|
||||
|
||||
The following keyword arguments are also accepted:
|
||||
|
||||
:param host: The hostname to make a request against. This defaults to
|
||||
"geoip.maxmind.com". In most cases, you should not need to set this
|
||||
explicitly.
|
||||
:param locales: This is list of locale codes. This argument will be
|
||||
passed on to record classes to use when their name properties are
|
||||
called. The default value is ['en'].
|
||||
|
||||
The order of the locales is significant. When a record class has
|
||||
multiple names (country, city, etc.), its name property will return
|
||||
the name in the first locale that has one.
|
||||
|
||||
Note that the only locale which is always present in the GeoIP2
|
||||
data is "en". If you do not include this locale, the name property
|
||||
may end up returning None even when the record has an English name.
|
||||
|
||||
Currently, the valid locale codes are:
|
||||
|
||||
* de -- German
|
||||
* en -- English names may still include accented characters if that is
|
||||
the accepted spelling in English. In other words, English does not
|
||||
mean ASCII.
|
||||
* es -- Spanish
|
||||
* fr -- French
|
||||
* ja -- Japanese
|
||||
* pt-BR -- Brazilian Portuguese
|
||||
* ru -- Russian
|
||||
* zh-CN -- Simplified Chinese.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
user_id,
|
||||
license_key,
|
||||
host='geoip.maxmind.com',
|
||||
locales=None,
|
||||
timeout=None):
|
||||
"""Construct a Client."""
|
||||
# pylint: disable=too-many-arguments
|
||||
if locales is None:
|
||||
locales = ['en']
|
||||
self._locales = locales
|
||||
self._user_id = user_id
|
||||
self._license_key = license_key
|
||||
self._base_uri = 'https://%s/geoip/v2.1' % host
|
||||
self._timeout = timeout
|
||||
|
||||
def city(self, ip_address='me'):
|
||||
"""Call GeoIP2 Precision City endpoint with the specified IP.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string. If no
|
||||
address is provided, the address that the web service is
|
||||
called from will be used.
|
||||
|
||||
:returns: :py:class:`geoip2.models.City` object
|
||||
|
||||
"""
|
||||
return self._response_for('city', geoip2.models.City, ip_address)
|
||||
|
||||
def country(self, ip_address='me'):
|
||||
"""Call the GeoIP2 Country endpoint with the specified IP.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string. If no address
|
||||
is provided, the address that the web service is called from will
|
||||
be used.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Country` object
|
||||
|
||||
"""
|
||||
return self._response_for('country', geoip2.models.Country, ip_address)
|
||||
|
||||
def insights(self, ip_address='me'):
|
||||
"""Call the GeoIP2 Precision: Insights endpoint with the specified IP.
|
||||
|
||||
:param ip_address: IPv4 or IPv6 address as a string. If no address
|
||||
is provided, the address that the web service is called from will
|
||||
be used.
|
||||
|
||||
:returns: :py:class:`geoip2.models.Insights` object
|
||||
|
||||
"""
|
||||
return self._response_for('insights', geoip2.models.Insights,
|
||||
ip_address)
|
||||
|
||||
def _response_for(self, path, model_class, ip_address):
|
||||
if ip_address != 'me':
|
||||
ip_address = str(compat_ip_address(ip_address))
|
||||
uri = '/'.join([self._base_uri, path, ip_address])
|
||||
response = requests.get(uri,
|
||||
auth=(self._user_id, self._license_key),
|
||||
headers={'Accept': 'application/json',
|
||||
'User-Agent': self._user_agent()},
|
||||
timeout=self._timeout)
|
||||
if response.status_code == 200:
|
||||
body = self._handle_success(response, uri)
|
||||
return model_class(body, locales=self._locales)
|
||||
else:
|
||||
self._handle_error(response, uri)
|
||||
|
||||
def _user_agent(self):
|
||||
return 'GeoIP2 Python Client v%s (%s)' % (geoip2.__version__,
|
||||
default_user_agent())
|
||||
|
||||
def _handle_success(self, response, uri):
|
||||
try:
|
||||
return response.json()
|
||||
except ValueError as ex:
|
||||
raise GeoIP2Error('Received a 200 response for %(uri)s'
|
||||
' but could not decode the response as '
|
||||
'JSON: ' % locals() + ', '.join(ex.args), 200,
|
||||
uri)
|
||||
|
||||
def _handle_error(self, response, uri):
|
||||
status = response.status_code
|
||||
|
||||
if 400 <= status < 500:
|
||||
self._handle_4xx_status(response, status, uri)
|
||||
elif 500 <= status < 600:
|
||||
self._handle_5xx_status(status, uri)
|
||||
else:
|
||||
self._handle_non_200_status(status, uri)
|
||||
|
||||
def _handle_4xx_status(self, response, status, uri):
|
||||
if not response.content:
|
||||
raise HTTPError('Received a %(status)i error for %(uri)s '
|
||||
'with no body.' % locals(), status, uri)
|
||||
elif response.headers['Content-Type'].find('json') == -1:
|
||||
raise HTTPError('Received a %i for %s with the following '
|
||||
'body: %s' % (status, uri, response.content),
|
||||
status, uri)
|
||||
try:
|
||||
body = response.json()
|
||||
except ValueError as ex:
|
||||
raise HTTPError(
|
||||
'Received a %(status)i error for %(uri)s but it did'
|
||||
' not include the expected JSON body: ' % locals() +
|
||||
', '.join(ex.args), status, uri)
|
||||
else:
|
||||
if 'code' in body and 'error' in body:
|
||||
self._handle_web_service_error(
|
||||
body.get('error'), body.get('code'), status, uri)
|
||||
else:
|
||||
raise HTTPError(
|
||||
'Response contains JSON but it does not specify '
|
||||
'code or error keys', status, uri)
|
||||
|
||||
def _handle_web_service_error(self, message, code, status, uri):
|
||||
if code in ('IP_ADDRESS_NOT_FOUND', 'IP_ADDRESS_RESERVED'):
|
||||
raise AddressNotFoundError(message)
|
||||
elif code in ('AUTHORIZATION_INVALID', 'LICENSE_KEY_REQUIRED',
|
||||
'USER_ID_REQUIRED', 'USER_ID_UNKNOWN'):
|
||||
raise AuthenticationError(message)
|
||||
elif code in ('INSUFFICIENT_FUNDS', 'OUT_OF_QUERIES'):
|
||||
raise OutOfQueriesError(message)
|
||||
elif code == 'PERMISSION_REQUIRED':
|
||||
raise PermissionRequiredError(message)
|
||||
|
||||
raise InvalidRequestError(message, code, status, uri)
|
||||
|
||||
def _handle_5xx_status(self, status, uri):
|
||||
raise HTTPError('Received a server error (%(status)i) for '
|
||||
'%(uri)s' % locals(), status, uri)
|
||||
|
||||
def _handle_non_200_status(self, status, uri):
|
||||
raise HTTPError('Received a very surprising HTTP status '
|
||||
'(%(status)i) for %(uri)s' % locals(), status, uri)
|
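A usage sketch for the Client defined above; the user ID and license key are placeholders for real GeoIP2 Precision credentials, and the IP address is illustrative.

```
from geoip2.errors import AddressNotFoundError
from geoip2.webservice import Client

client = Client(42, 'abcdef123456')
try:
    response = client.city('128.101.101.101')
    print(response.city.name, response.location.accuracy_radius)
except AddressNotFoundError as err:
    print('The web service has no data for this address: %s' % err)
```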
2417  lib/ipaddress.py  Normal file
File diff suppressed because it is too large
46  lib/maxminddb/__init__.py  Normal file
@@ -0,0 +1,46 @@
# pylint:disable=C0111
|
||||
import os
|
||||
|
||||
import maxminddb.reader
|
||||
|
||||
try:
|
||||
import maxminddb.extension
|
||||
except ImportError:
|
||||
maxminddb.extension = None
|
||||
|
||||
from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
|
||||
MODE_MEMORY)
|
||||
from maxminddb.decoder import InvalidDatabaseError
|
||||
|
||||
|
||||
def open_database(database, mode=MODE_AUTO):
|
||||
"""Open a Maxmind DB database
|
||||
|
||||
Arguments:
|
||||
database -- A path to a valid MaxMind DB file such as a GeoIP2
|
||||
database file.
|
||||
mode -- mode to open the database with. Valid modes are:
|
||||
* MODE_MMAP_EXT - use the C extension with memory map.
|
||||
* MODE_MMAP - read from memory map. Pure Python.
|
||||
* MODE_FILE - read database as standard file. Pure Python.
|
||||
* MODE_MEMORY - load database into memory. Pure Python.
|
||||
* MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that
|
||||
order. Default mode.
|
||||
"""
|
||||
if (mode == MODE_AUTO and maxminddb.extension and
|
||||
hasattr(maxminddb.extension, 'Reader')) or mode == MODE_MMAP_EXT:
|
||||
return maxminddb.extension.Reader(database)
|
||||
elif mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY):
|
||||
return maxminddb.reader.Reader(database, mode)
|
||||
raise ValueError('Unsupported open mode: {0}'.format(mode))
|
||||
|
||||
|
||||
def Reader(database): # pylint: disable=invalid-name
|
||||
"""This exists for backwards compatibility. Use open_database instead"""
|
||||
return open_database(database)
|
||||
|
||||
__title__ = 'maxminddb'
|
||||
__version__ = '1.2.1'
|
||||
__author__ = 'Gregory Oschwald'
|
||||
__license__ = 'Apache License, Version 2.0'
|
||||
__copyright__ = 'Copyright 2014 Maxmind, Inc.'
|
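A short sketch of the module above in use; the database path is illustrative, and MODE_AUTO falls back to the pure-Python readers when the C extension is not built.

```
import maxminddb
from maxminddb.const import MODE_AUTO

reader = maxminddb.open_database('/path/to/GeoLite2-City.mmdb', MODE_AUTO)

record = reader.get('152.216.7.110')   # plain dict of raw data, or None
metadata = reader.metadata()
print(metadata.database_type, metadata.node_count)

reader.close()
```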
33  lib/maxminddb/compat.py  Normal file
@@ -0,0 +1,33 @@
import sys

import ipaddress

# pylint: skip-file

if sys.version_info[0] == 2:
    def compat_ip_address(address):
        if isinstance(address, bytes):
            address = address.decode()
        return ipaddress.ip_address(address)

    int_from_byte = ord

    FileNotFoundError = IOError

    def int_from_bytes(b):
        if b:
            return int(b.encode("hex"), 16)
        return 0

    byte_from_int = chr
else:
    def compat_ip_address(address):
        return ipaddress.ip_address(address)

    int_from_byte = lambda x: x

    FileNotFoundError = FileNotFoundError

    int_from_bytes = lambda x: int.from_bytes(x, 'big')

    byte_from_int = lambda x: bytes([x])
|
7  lib/maxminddb/const.py  Normal file
@@ -0,0 +1,7 @@
"""Constants used in the API"""
|
||||
|
||||
MODE_AUTO = 0
|
||||
MODE_MMAP_EXT = 1
|
||||
MODE_MMAP = 2
|
||||
MODE_FILE = 4
|
||||
MODE_MEMORY = 8
|
173  lib/maxminddb/decoder.py  Normal file
@@ -0,0 +1,173 @@
"""
|
||||
maxminddb.decoder
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
This package contains code for decoding the MaxMind DB data section.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import struct
|
||||
|
||||
from maxminddb.compat import byte_from_int, int_from_bytes
|
||||
from maxminddb.errors import InvalidDatabaseError
|
||||
|
||||
|
||||
class Decoder(object): # pylint: disable=too-few-public-methods
|
||||
|
||||
"""Decoder for the data section of the MaxMind DB"""
|
||||
|
||||
def __init__(self, database_buffer, pointer_base=0, pointer_test=False):
|
||||
"""Created a Decoder for a MaxMind DB
|
||||
|
||||
Arguments:
|
||||
database_buffer -- an mmap'd MaxMind DB file.
|
||||
pointer_base -- the base number to use when decoding a pointer
|
||||
pointer_test -- used for internal unit testing of pointer code
|
||||
"""
|
||||
self._pointer_test = pointer_test
|
||||
self._buffer = database_buffer
|
||||
self._pointer_base = pointer_base
|
||||
|
||||
def _decode_array(self, size, offset):
|
||||
array = []
|
||||
for _ in range(size):
|
||||
(value, offset) = self.decode(offset)
|
||||
array.append(value)
|
||||
return array, offset
|
||||
|
||||
def _decode_boolean(self, size, offset):
|
||||
return size != 0, offset
|
||||
|
||||
def _decode_bytes(self, size, offset):
|
||||
new_offset = offset + size
|
||||
return self._buffer[offset:new_offset], new_offset
|
||||
|
||||
# pylint: disable=no-self-argument
|
||||
# |-> I am open to better ways of doing this as long as it doesn't involve
|
||||
# lots of code duplication.
|
||||
def _decode_packed_type(type_code, type_size, pad=False):
|
||||
# pylint: disable=protected-access, missing-docstring
|
||||
def unpack_type(self, size, offset):
|
||||
if not pad:
|
||||
self._verify_size(size, type_size)
|
||||
new_offset = offset + type_size
|
||||
packed_bytes = self._buffer[offset:new_offset]
|
||||
if pad:
|
||||
packed_bytes = packed_bytes.rjust(type_size, b'\x00')
|
||||
(value,) = struct.unpack(type_code, packed_bytes)
|
||||
return value, new_offset
|
||||
return unpack_type
|
||||
|
||||
def _decode_map(self, size, offset):
|
||||
container = {}
|
||||
for _ in range(size):
|
||||
(key, offset) = self.decode(offset)
|
||||
(value, offset) = self.decode(offset)
|
||||
container[key] = value
|
||||
return container, offset
|
||||
|
||||
_pointer_value_offset = {
|
||||
1: 0,
|
||||
2: 2048,
|
||||
3: 526336,
|
||||
4: 0,
|
||||
}
|
||||
|
||||
def _decode_pointer(self, size, offset):
|
||||
pointer_size = ((size >> 3) & 0x3) + 1
|
||||
new_offset = offset + pointer_size
|
||||
pointer_bytes = self._buffer[offset:new_offset]
|
||||
packed = pointer_bytes if pointer_size == 4 else struct.pack(
|
||||
b'!c', byte_from_int(size & 0x7)) + pointer_bytes
|
||||
unpacked = int_from_bytes(packed)
|
||||
pointer = unpacked + self._pointer_base + \
|
||||
self._pointer_value_offset[pointer_size]
|
||||
if self._pointer_test:
|
||||
return pointer, new_offset
|
||||
(value, _) = self.decode(pointer)
|
||||
return value, new_offset
|
||||
|
||||
def _decode_uint(self, size, offset):
|
||||
new_offset = offset + size
|
||||
uint_bytes = self._buffer[offset:new_offset]
|
||||
return int_from_bytes(uint_bytes), new_offset
|
||||
|
||||
def _decode_utf8_string(self, size, offset):
|
||||
new_offset = offset + size
|
||||
return self._buffer[offset:new_offset].decode('utf-8'), new_offset
|
||||
|
||||
_type_decoder = {
|
||||
1: _decode_pointer,
|
||||
2: _decode_utf8_string,
|
||||
3: _decode_packed_type(b'!d', 8), # double,
|
||||
4: _decode_bytes,
|
||||
5: _decode_uint, # uint16
|
||||
6: _decode_uint, # uint32
|
||||
7: _decode_map,
|
||||
8: _decode_packed_type(b'!i', 4, pad=True), # int32
|
||||
9: _decode_uint, # uint64
|
||||
10: _decode_uint, # uint128
|
||||
11: _decode_array,
|
||||
14: _decode_boolean,
|
||||
15: _decode_packed_type(b'!f', 4), # float,
|
||||
}
|
||||
|
||||
def decode(self, offset):
|
||||
"""Decode a section of the data section starting at offset
|
||||
|
||||
Arguments:
|
||||
offset -- the location of the data structure to decode
|
||||
"""
|
||||
new_offset = offset + 1
|
||||
(ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset])
|
||||
type_num = ctrl_byte >> 5
|
||||
# Extended type
|
||||
if not type_num:
|
||||
(type_num, new_offset) = self._read_extended(new_offset)
|
||||
|
||||
if type_num not in self._type_decoder:
|
||||
raise InvalidDatabaseError('Unexpected type number ({type}) '
|
||||
'encountered'.format(type=type_num))
|
||||
|
||||
(size, new_offset) = self._size_from_ctrl_byte(
|
||||
ctrl_byte, new_offset, type_num)
|
||||
return self._type_decoder[type_num](self, size, new_offset)
|
||||
|
||||
def _read_extended(self, offset):
|
||||
(next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1])
|
||||
type_num = next_byte + 7
|
||||
if type_num < 7:
|
||||
raise InvalidDatabaseError(
|
||||
'Something went horribly wrong in the decoder. An '
|
||||
'extended type resolved to a type number < 8 '
|
||||
'({type})'.format(type=type_num))
|
||||
return type_num, offset + 1
|
||||
|
||||
def _verify_size(self, expected, actual):
|
||||
if expected != actual:
|
||||
raise InvalidDatabaseError(
|
||||
'The MaxMind DB file\'s data section contains bad data '
|
||||
'(unknown data type or corrupt data)'
|
||||
)
|
||||
|
||||
def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num):
|
||||
size = ctrl_byte & 0x1f
|
||||
if type_num == 1:
|
||||
return size, offset
|
||||
bytes_to_read = 0 if size < 29 else size - 28
|
||||
|
||||
new_offset = offset + bytes_to_read
|
||||
size_bytes = self._buffer[offset:new_offset]
|
||||
|
||||
# Using unpack rather than int_from_bytes as it is about 200 lookups
|
||||
# per second faster here.
|
||||
if size == 29:
|
||||
size = 29 + struct.unpack(b'!B', size_bytes)[0]
|
||||
elif size == 30:
|
||||
size = 285 + struct.unpack(b'!H', size_bytes)[0]
|
||||
elif size > 30:
|
||||
size = struct.unpack(
|
||||
b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821
|
||||
|
||||
return size, new_offset
|
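A worked example of the control byte handled by decode() above: the top three bits select the type and the low five bits the size, so 0x44 means type 2 (a UTF-8 string) of length 4, followed by the string bytes.

```
from maxminddb.decoder import Decoder

buf = b'\x44Plex'           # hand-built data section containing "Plex"
decoder = Decoder(buf)
print(decoder.decode(0))    # ('Plex', 5): the decoded value and next offset
```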
11  lib/maxminddb/errors.py  Normal file
@@ -0,0 +1,11 @@
"""
|
||||
maxminddb.errors
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains custom errors for the MaxMind DB reader
|
||||
"""
|
||||
|
||||
|
||||
class InvalidDatabaseError(RuntimeError):
|
||||
|
||||
"""This error is thrown when unexpected data is found in the database."""
|
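The readers raise this error when an existing file turns out not to be a valid MaxMind DB; a small sketch (the path is illustrative):

```
from maxminddb import open_database
from maxminddb.errors import InvalidDatabaseError

try:
    reader = open_database('/path/to/some-other-file.dat')
except InvalidDatabaseError as err:
    print('Not a usable MaxMind DB file: %s' % err)
```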
570  lib/maxminddb/extension/maxminddb.c  Normal file
@@ -0,0 +1,570 @@
#include <Python.h>
|
||||
#include <maxminddb.h>
|
||||
#include "structmember.h"
|
||||
|
||||
#define __STDC_FORMAT_MACROS
|
||||
#include <inttypes.h>
|
||||
|
||||
static PyTypeObject Reader_Type;
|
||||
static PyTypeObject Metadata_Type;
|
||||
static PyObject *MaxMindDB_error;
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD /* no semicolon */
|
||||
MMDB_s *mmdb;
|
||||
} Reader_obj;
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD /* no semicolon */
|
||||
PyObject *binary_format_major_version;
|
||||
PyObject *binary_format_minor_version;
|
||||
PyObject *build_epoch;
|
||||
PyObject *database_type;
|
||||
PyObject *description;
|
||||
PyObject *ip_version;
|
||||
PyObject *languages;
|
||||
PyObject *node_count;
|
||||
PyObject *record_size;
|
||||
} Metadata_obj;
|
||||
|
||||
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list);
|
||||
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list);
|
||||
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list);
|
||||
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list);
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
#define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void)
|
||||
#define RETURN_MOD_INIT(m) return (m)
|
||||
#define FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError
|
||||
#else
|
||||
#define MOD_INIT(name) PyMODINIT_FUNC init ## name(void)
|
||||
#define RETURN_MOD_INIT(m) return
|
||||
#define PyInt_FromLong PyLong_FromLong
|
||||
#define FILE_NOT_FOUND_ERROR PyExc_IOError
|
||||
#endif
|
||||
|
||||
#ifdef __GNUC__
|
||||
# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
|
||||
#else
|
||||
# define UNUSED(x) UNUSED_ ## x
|
||||
#endif
|
||||
|
||||
static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
char *filename;
|
||||
int mode = 0;
|
||||
|
||||
static char *kwlist[] = {"database", "mode", NULL};
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (mode != 0 && mode != 1) {
|
||||
PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). Only "
|
||||
"MODE_AUTO and MODE_MMAP_EXT are supported by this extension.",
|
||||
mode);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (0 != access(filename, R_OK)) {
|
||||
PyErr_Format(FILE_NOT_FOUND_ERROR,
|
||||
"No such file or directory: '%s'",
|
||||
filename);
|
||||
return -1;
|
||||
}
|
||||
|
||||
MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s));
|
||||
if (NULL == mmdb) {
|
||||
PyErr_NoMemory();
|
||||
return -1;
|
||||
}
|
||||
|
||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
||||
if (!mmdb_obj) {
|
||||
free(mmdb);
|
||||
PyErr_NoMemory();
|
||||
return -1;
|
||||
}
|
||||
|
||||
uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb);
|
||||
|
||||
if (MMDB_SUCCESS != status) {
|
||||
free(mmdb);
|
||||
PyErr_Format(
|
||||
MaxMindDB_error,
|
||||
"Error opening database file (%s). Is this a valid MaxMind DB file?",
|
||||
filename
|
||||
);
|
||||
return -1;
|
||||
}
|
||||
|
||||
mmdb_obj->mmdb = mmdb;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyObject *Reader_get(PyObject *self, PyObject *args)
|
||||
{
|
||||
char *ip_address = NULL;
|
||||
|
||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
||||
if (!PyArg_ParseTuple(args, "s", &ip_address)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
MMDB_s *mmdb = mmdb_obj->mmdb;
|
||||
|
||||
if (NULL == mmdb) {
|
||||
PyErr_SetString(PyExc_ValueError,
|
||||
"Attempt to read from a closed MaxMind DB.");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int gai_error = 0;
|
||||
int mmdb_error = MMDB_SUCCESS;
|
||||
MMDB_lookup_result_s result =
|
||||
MMDB_lookup_string(mmdb, ip_address, &gai_error,
|
||||
&mmdb_error);
|
||||
|
||||
if (0 != gai_error) {
|
||||
PyErr_Format(PyExc_ValueError,
|
||||
"'%s' does not appear to be an IPv4 or IPv6 address.",
|
||||
ip_address);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (MMDB_SUCCESS != mmdb_error) {
|
||||
PyObject *exception;
|
||||
if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) {
|
||||
exception = PyExc_ValueError;
|
||||
} else {
|
||||
exception = MaxMindDB_error;
|
||||
}
|
||||
PyErr_Format(exception, "Error looking up %s. %s",
|
||||
ip_address, MMDB_strerror(mmdb_error));
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (!result.found_entry) {
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
MMDB_entry_data_list_s *entry_data_list = NULL;
|
||||
int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list);
|
||||
if (MMDB_SUCCESS != status) {
|
||||
PyErr_Format(MaxMindDB_error,
|
||||
"Error while looking up data for %s. %s",
|
||||
ip_address, MMDB_strerror(status));
|
||||
MMDB_free_entry_data_list(entry_data_list);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
|
||||
PyObject *py_obj = from_entry_data_list(&entry_data_list);
|
||||
MMDB_free_entry_data_list(original_entry_data_list);
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args))
|
||||
{
|
||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
||||
|
||||
if (NULL == mmdb_obj->mmdb) {
|
||||
PyErr_SetString(PyExc_IOError,
|
||||
"Attempt to read from a closed MaxMind DB.");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
MMDB_entry_data_list_s *entry_data_list;
|
||||
MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list);
|
||||
MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
|
||||
|
||||
PyObject *metadata_dict = from_entry_data_list(&entry_data_list);
|
||||
MMDB_free_entry_data_list(original_entry_data_list);
|
||||
if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) {
|
||||
PyErr_SetString(MaxMindDB_error,
|
||||
"Error decoding metadata.");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
PyObject *args = PyTuple_New(0);
|
||||
if (NULL == args) {
|
||||
Py_DECREF(metadata_dict);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args,
|
||||
metadata_dict);
|
||||
|
||||
Py_DECREF(metadata_dict);
|
||||
return metadata;
|
||||
}
|
||||
|
||||
static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args))
|
||||
{
|
||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
||||
|
||||
if (NULL != mmdb_obj->mmdb) {
|
||||
MMDB_close(mmdb_obj->mmdb);
|
||||
free(mmdb_obj->mmdb);
|
||||
mmdb_obj->mmdb = NULL;
|
||||
}
|
||||
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
static void Reader_dealloc(PyObject *self)
|
||||
{
|
||||
Reader_obj *obj = (Reader_obj *)self;
|
||||
if (NULL != obj->mmdb) {
|
||||
Reader_close(self, NULL);
|
||||
}
|
||||
|
||||
PyObject_Del(self);
|
||||
}
|
||||
|
||||
static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
|
||||
PyObject
|
||||
*binary_format_major_version,
|
||||
*binary_format_minor_version,
|
||||
*build_epoch,
|
||||
*database_type,
|
||||
*description,
|
||||
*ip_version,
|
||||
*languages,
|
||||
*node_count,
|
||||
*record_size;
|
||||
|
||||
static char *kwlist[] = {
|
||||
"binary_format_major_version",
|
||||
"binary_format_minor_version",
|
||||
"build_epoch",
|
||||
"database_type",
|
||||
"description",
|
||||
"ip_version",
|
||||
"languages",
|
||||
"node_count",
|
||||
"record_size",
|
||||
NULL
|
||||
};
|
||||
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist,
|
||||
&binary_format_major_version,
|
||||
&binary_format_minor_version,
|
||||
&build_epoch,
|
||||
&database_type,
|
||||
&description,
|
||||
&ip_version,
|
||||
&languages,
|
||||
&node_count,
|
||||
&record_size)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
Metadata_obj *obj = (Metadata_obj *)self;
|
||||
|
||||
obj->binary_format_major_version = binary_format_major_version;
|
||||
obj->binary_format_minor_version = binary_format_minor_version;
|
||||
obj->build_epoch = build_epoch;
|
||||
obj->database_type = database_type;
|
||||
obj->description = description;
|
||||
obj->ip_version = ip_version;
|
||||
obj->languages = languages;
|
||||
obj->node_count = node_count;
|
||||
obj->record_size = record_size;
|
||||
|
||||
Py_INCREF(obj->binary_format_major_version);
|
||||
Py_INCREF(obj->binary_format_minor_version);
|
||||
Py_INCREF(obj->build_epoch);
|
||||
Py_INCREF(obj->database_type);
|
||||
Py_INCREF(obj->description);
|
||||
Py_INCREF(obj->ip_version);
|
||||
Py_INCREF(obj->languages);
|
||||
Py_INCREF(obj->node_count);
|
||||
Py_INCREF(obj->record_size);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void Metadata_dealloc(PyObject *self)
|
||||
{
|
||||
Metadata_obj *obj = (Metadata_obj *)self;
|
||||
Py_DECREF(obj->binary_format_major_version);
|
||||
Py_DECREF(obj->binary_format_minor_version);
|
||||
Py_DECREF(obj->build_epoch);
|
||||
Py_DECREF(obj->database_type);
|
||||
Py_DECREF(obj->description);
|
||||
Py_DECREF(obj->ip_version);
|
||||
Py_DECREF(obj->languages);
|
||||
Py_DECREF(obj->node_count);
|
||||
Py_DECREF(obj->record_size);
|
||||
PyObject_Del(self);
|
||||
}
|
||||
|
||||
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list)
|
||||
{
|
||||
if (NULL == entry_data_list || NULL == *entry_data_list) {
|
||||
PyErr_SetString(
|
||||
MaxMindDB_error,
|
||||
"Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb."
|
||||
);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
switch ((*entry_data_list)->entry_data.type) {
|
||||
case MMDB_DATA_TYPE_MAP:
|
||||
return from_map(entry_data_list);
|
||||
case MMDB_DATA_TYPE_ARRAY:
|
||||
return from_array(entry_data_list);
|
||||
case MMDB_DATA_TYPE_UTF8_STRING:
|
||||
return PyUnicode_FromStringAndSize(
|
||||
(*entry_data_list)->entry_data.utf8_string,
|
||||
(*entry_data_list)->entry_data.data_size
|
||||
);
|
||||
case MMDB_DATA_TYPE_BYTES:
|
||||
return PyByteArray_FromStringAndSize(
|
||||
(const char *)(*entry_data_list)->entry_data.bytes,
|
||||
(Py_ssize_t)(*entry_data_list)->entry_data.data_size);
|
||||
case MMDB_DATA_TYPE_DOUBLE:
|
||||
return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value);
|
||||
case MMDB_DATA_TYPE_FLOAT:
|
||||
return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value);
|
||||
case MMDB_DATA_TYPE_UINT16:
|
||||
return PyLong_FromLong( (*entry_data_list)->entry_data.uint16);
|
||||
case MMDB_DATA_TYPE_UINT32:
|
||||
return PyLong_FromLong((*entry_data_list)->entry_data.uint32);
|
||||
case MMDB_DATA_TYPE_BOOLEAN:
|
||||
return PyBool_FromLong((*entry_data_list)->entry_data.boolean);
|
||||
case MMDB_DATA_TYPE_UINT64:
|
||||
return PyLong_FromUnsignedLongLong(
|
||||
(*entry_data_list)->entry_data.uint64);
|
||||
case MMDB_DATA_TYPE_UINT128:
|
||||
return from_uint128(*entry_data_list);
|
||||
case MMDB_DATA_TYPE_INT32:
|
||||
return PyLong_FromLong((*entry_data_list)->entry_data.int32);
|
||||
default:
|
||||
PyErr_Format(MaxMindDB_error,
|
||||
"Invalid data type arguments: %d",
|
||||
(*entry_data_list)->entry_data.type);
|
||||
return NULL;
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list)
|
||||
{
|
||||
PyObject *py_obj = PyDict_New();
|
||||
if (NULL == py_obj) {
|
||||
PyErr_NoMemory();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const uint32_t map_size = (*entry_data_list)->entry_data.data_size;
|
||||
|
||||
uint i;
|
||||
// entry_data_list cannot start out NULL (see from_entry_data_list). We
|
||||
// check it in the loop because it may become NULL.
|
||||
// coverity[check_after_deref]
|
||||
for (i = 0; i < map_size && entry_data_list; i++) {
|
||||
*entry_data_list = (*entry_data_list)->next;
|
||||
|
||||
PyObject *key = PyUnicode_FromStringAndSize(
|
||||
(char *)(*entry_data_list)->entry_data.utf8_string,
|
||||
(*entry_data_list)->entry_data.data_size
|
||||
);
|
||||
|
||||
*entry_data_list = (*entry_data_list)->next;
|
||||
|
||||
PyObject *value = from_entry_data_list(entry_data_list);
|
||||
if (NULL == value) {
|
||||
Py_DECREF(key);
|
||||
Py_DECREF(py_obj);
|
||||
return NULL;
|
||||
}
|
||||
PyDict_SetItem(py_obj, key, value);
|
||||
Py_DECREF(value);
|
||||
Py_DECREF(key);
|
||||
}
|
||||
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list)
|
||||
{
|
||||
const uint32_t size = (*entry_data_list)->entry_data.data_size;
|
||||
|
||||
PyObject *py_obj = PyList_New(size);
|
||||
if (NULL == py_obj) {
|
||||
PyErr_NoMemory();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
uint i;
|
||||
// entry_data_list cannot start out NULL (see from_entry_data_list). We
|
||||
// check it in the loop because it may become NULL.
|
||||
// coverity[check_after_deref]
|
||||
for (i = 0; i < size && entry_data_list; i++) {
|
||||
*entry_data_list = (*entry_data_list)->next;
|
||||
PyObject *value = from_entry_data_list(entry_data_list);
|
||||
if (NULL == value) {
|
||||
Py_DECREF(py_obj);
|
||||
return NULL;
|
||||
}
|
||||
// PyList_SetItem 'steals' the reference
|
||||
PyList_SetItem(py_obj, i, value);
|
||||
}
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list)
|
||||
{
|
||||
uint64_t high = 0;
|
||||
uint64_t low = 0;
|
||||
#if MMDB_UINT128_IS_BYTE_ARRAY
|
||||
int i;
|
||||
for (i = 0; i < 8; i++) {
|
||||
high = (high << 8) | entry_data_list->entry_data.uint128[i];
|
||||
}
|
||||
|
||||
for (i = 8; i < 16; i++) {
|
||||
low = (low << 8) | entry_data_list->entry_data.uint128[i];
|
||||
}
|
||||
#else
|
||||
high = entry_data_list->entry_data.uint128 >> 64;
|
||||
low = (uint64_t)entry_data_list->entry_data.uint128;
|
||||
#endif
|
||||
|
||||
char *num_str = malloc(33);
|
||||
if (NULL == num_str) {
|
||||
PyErr_NoMemory();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low);
|
||||
|
||||
PyObject *py_obj = PyLong_FromString(num_str, NULL, 16);
|
||||
|
||||
free(num_str);
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
static PyMethodDef Reader_methods[] = {
|
||||
{ "get", Reader_get, METH_VARARGS,
|
||||
"Get record for IP address" },
|
||||
{ "metadata", Reader_metadata, METH_NOARGS,
|
||||
"Returns metadata object for database" },
|
||||
{ "close", Reader_close, METH_NOARGS, "Closes database"},
|
||||
{ NULL, NULL, 0, NULL }
|
||||
};
|
||||
|
||||
static PyTypeObject Reader_Type = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
.tp_basicsize = sizeof(Reader_obj),
|
||||
.tp_dealloc = Reader_dealloc,
|
||||
.tp_doc = "Reader object",
|
||||
.tp_flags = Py_TPFLAGS_DEFAULT,
|
||||
.tp_methods = Reader_methods,
|
||||
.tp_name = "Reader",
|
||||
.tp_init = Reader_init,
|
||||
};
|
||||
|
||||
static PyMethodDef Metadata_methods[] = {
|
||||
{ NULL, NULL, 0, NULL }
|
||||
};
|
||||
|
||||
/* *INDENT-OFF* */
|
||||
static PyMemberDef Metadata_members[] = {
|
||||
{ "binary_format_major_version", T_OBJECT, offsetof(
|
||||
Metadata_obj, binary_format_major_version), READONLY, NULL },
|
||||
{ "binary_format_minor_version", T_OBJECT, offsetof(
|
||||
Metadata_obj, binary_format_minor_version), READONLY, NULL },
|
||||
{ "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch),
|
||||
READONLY, NULL },
|
||||
{ "database_type", T_OBJECT, offsetof(Metadata_obj, database_type),
|
||||
READONLY, NULL },
|
||||
{ "description", T_OBJECT, offsetof(Metadata_obj, description),
|
||||
READONLY, NULL },
|
||||
{ "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version),
|
||||
READONLY, NULL },
|
||||
{ "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY,
|
||||
NULL },
|
||||
{ "node_count", T_OBJECT, offsetof(Metadata_obj, node_count),
|
||||
READONLY, NULL },
|
||||
{ "record_size", T_OBJECT, offsetof(Metadata_obj, record_size),
|
||||
READONLY, NULL },
|
||||
{ NULL, 0, 0, 0, NULL }
|
||||
};
|
||||
/* *INDENT-ON* */
|
||||
|
||||
static PyTypeObject Metadata_Type = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
.tp_basicsize = sizeof(Metadata_obj),
|
||||
.tp_dealloc = Metadata_dealloc,
|
||||
.tp_doc = "Metadata object",
|
||||
.tp_flags = Py_TPFLAGS_DEFAULT,
|
||||
.tp_members = Metadata_members,
|
||||
.tp_methods = Metadata_methods,
|
||||
.tp_name = "Metadata",
|
||||
.tp_init = Metadata_init
|
||||
};
|
||||
|
||||
static PyMethodDef MaxMindDB_methods[] = {
|
||||
{ NULL, NULL, 0, NULL }
|
||||
};
|
||||
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
static struct PyModuleDef MaxMindDB_module = {
|
||||
PyModuleDef_HEAD_INIT,
|
||||
.m_name = "extension",
|
||||
.m_doc = "This is a C extension to read MaxMind DB file format",
|
||||
.m_methods = MaxMindDB_methods,
|
||||
};
|
||||
#endif
|
||||
|
||||
MOD_INIT(extension){
|
||||
PyObject *m;
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
m = PyModule_Create(&MaxMindDB_module);
|
||||
#else
|
||||
m = Py_InitModule("extension", MaxMindDB_methods);
|
||||
#endif
|
||||
|
||||
if (!m) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
Reader_Type.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&Reader_Type)) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
Py_INCREF(&Reader_Type);
|
||||
PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type);
|
||||
|
||||
Metadata_Type.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&Metadata_Type)) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type);
|
||||
|
||||
PyObject* error_mod = PyImport_ImportModule("maxminddb.errors");
|
||||
if (error_mod == NULL) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError");
|
||||
Py_DECREF(error_mod);
|
||||
|
||||
if (MaxMindDB_error == NULL) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
Py_INCREF(MaxMindDB_error);
|
||||
|
||||
/* We primarily add it to the module for backwards compatibility */
|
||||
PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error);
|
||||
|
||||
RETURN_MOD_INIT(m);
|
||||
}
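The module init above registers the Reader and Metadata types on a maxminddb.extension module and re-exports InvalidDatabaseError from maxminddb.errors. A rough smoke test, assuming the C extension has actually been compiled for the running interpreter (PlexPy can also fall back to the pure Python reader below); the database path is illustrative:

```python
# Sketch only: exercises the compiled reader if the extension is available.
from maxminddb import extension

reader = extension.Reader('/path/to/GeoLite2-City.mmdb')  # illustrative path
try:
    record = reader.get('8.8.8.8')      # Reader_get: record for an IP address
    meta = reader.metadata()            # Reader_metadata: Metadata object
    print('%s nodes, record size %s' % (meta.node_count, meta.record_size))
    print(record)
finally:
    reader.close()                      # Reader_close: closes the database
```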
|
66 lib/maxminddb/file.py Normal file
@ -0,0 +1,66 @
"""For internal use only. It provides a slice-like file reader."""
|
||||
|
||||
import os
|
||||
|
||||
try:
|
||||
# pylint: disable=no-name-in-module
|
||||
from multiprocessing import Lock
|
||||
except ImportError:
|
||||
from threading import Lock
|
||||
|
||||
|
||||
class FileBuffer(object):
|
||||
|
||||
"""A slice-able file reader"""
|
||||
|
||||
def __init__(self, database):
|
||||
self._handle = open(database, 'rb')
|
||||
self._size = os.fstat(self._handle.fileno()).st_size
|
||||
if not hasattr(os, 'pread'):
|
||||
self._lock = Lock()
|
||||
|
||||
def __getitem__(self, key):
|
||||
if isinstance(key, slice):
|
||||
return self._read(key.stop - key.start, key.start)
|
||||
elif isinstance(key, int):
|
||||
return self._read(1, key)
|
||||
else:
|
||||
raise TypeError("Invalid argument type.")
|
||||
|
||||
def rfind(self, needle, start):
|
||||
"""Reverse find needle from start"""
|
||||
pos = self._read(self._size - start - 1, start).rfind(needle)
|
||||
if pos == -1:
|
||||
return pos
|
||||
return start + pos
|
||||
|
||||
def size(self):
|
||||
"""Size of file"""
|
||||
return self._size
|
||||
|
||||
def close(self):
|
||||
"""Close file"""
|
||||
self._handle.close()
|
||||
|
||||
if hasattr(os, 'pread'):
|
||||
|
||||
def _read(self, buffersize, offset):
|
||||
"""read that uses pread"""
|
||||
# pylint: disable=no-member
|
||||
return os.pread(self._handle.fileno(), buffersize, offset)
|
||||
|
||||
else:
|
||||
|
||||
def _read(self, buffersize, offset):
|
||||
"""read with a lock
|
||||
|
||||
This lock is necessary as after a fork, the different processes
|
||||
will share the same file table entry, even if we dup the fd, and
|
||||
as such the same offsets. There does not appear to be a way to
|
||||
duplicate the file table entry and we cannot re-open based on the
|
||||
original path as that file may have replaced with another or
|
||||
unlinked.
|
||||
"""
|
||||
with self._lock:
|
||||
self._handle.seek(offset)
|
||||
return self._handle.read(buffersize)
|
223 lib/maxminddb/reader.py Normal file
@ -0,0 +1,223 @
"""
|
||||
maxminddb.reader
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains the pure Python database reader and related classes.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
try:
|
||||
import mmap
|
||||
except ImportError:
|
||||
# pylint: disable=invalid-name
|
||||
mmap = None
|
||||
|
||||
import struct
|
||||
|
||||
from maxminddb.compat import byte_from_int, int_from_byte, compat_ip_address
|
||||
from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY
|
||||
from maxminddb.decoder import Decoder
|
||||
from maxminddb.errors import InvalidDatabaseError
|
||||
from maxminddb.file import FileBuffer
|
||||
|
||||
|
||||
class Reader(object):
|
||||
|
||||
"""
|
||||
Instances of this class provide a reader for the MaxMind DB format. IP
|
||||
addresses can be looked up using the ``get`` method.
|
||||
"""
|
||||
|
||||
_DATA_SECTION_SEPARATOR_SIZE = 16
|
||||
_METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com"
|
||||
|
||||
_ipv4_start = None
|
||||
|
||||
def __init__(self, database, mode=MODE_AUTO):
|
||||
"""Reader for the MaxMind DB file format
|
||||
|
||||
Arguments:
|
||||
database -- A path to a valid MaxMind DB file such as a GeoIP2
|
||||
database file.
|
||||
mode -- mode to open the database with. Valid mode are:
|
||||
* MODE_MMAP - read from memory map.
|
||||
* MODE_FILE - read database as standard file.
|
||||
* MODE_MEMORY - load database into memory.
|
||||
* MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default.
|
||||
"""
|
||||
# pylint: disable=redefined-variable-type
|
||||
if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP:
|
||||
with open(database, 'rb') as db_file:
|
||||
self._buffer = mmap.mmap(
|
||||
db_file.fileno(), 0, access=mmap.ACCESS_READ)
|
||||
self._buffer_size = self._buffer.size()
|
||||
elif mode in (MODE_AUTO, MODE_FILE):
|
||||
self._buffer = FileBuffer(database)
|
||||
self._buffer_size = self._buffer.size()
|
||||
elif mode == MODE_MEMORY:
|
||||
with open(database, 'rb') as db_file:
|
||||
self._buffer = db_file.read()
|
||||
self._buffer_size = len(self._buffer)
|
||||
else:
|
||||
raise ValueError('Unsupported open mode ({0}). Only MODE_AUTO, '
|
||||
' MODE_FILE, and MODE_MEMORY are supported by the pure Python '
|
||||
'Reader'.format(mode))
|
||||
|
||||
metadata_start = self._buffer.rfind(self._METADATA_START_MARKER,
|
||||
max(0, self._buffer_size
|
||||
- 128 * 1024))
|
||||
|
||||
if metadata_start == -1:
|
||||
self.close()
|
||||
raise InvalidDatabaseError('Error opening database file ({0}). '
|
||||
'Is this a valid MaxMind DB file?'
|
||||
''.format(database))
|
||||
|
||||
metadata_start += len(self._METADATA_START_MARKER)
|
||||
metadata_decoder = Decoder(self._buffer, metadata_start)
|
||||
(metadata, _) = metadata_decoder.decode(metadata_start)
|
||||
self._metadata = Metadata(
|
||||
**metadata) # pylint: disable=bad-option-value
|
||||
|
||||
self._decoder = Decoder(self._buffer, self._metadata.search_tree_size
|
||||
+ self._DATA_SECTION_SEPARATOR_SIZE)
|
||||
|
||||
def metadata(self):
|
||||
"""Return the metadata associated with the MaxMind DB file"""
|
||||
return self._metadata
|
||||
|
||||
def get(self, ip_address):
|
||||
"""Return the record for the ip_address in the MaxMind DB
|
||||
|
||||
|
||||
Arguments:
|
||||
ip_address -- an IP address in the standard string notation
|
||||
"""
|
||||
|
||||
address = compat_ip_address(ip_address)
|
||||
|
||||
if address.version == 6 and self._metadata.ip_version == 4:
|
||||
raise ValueError('Error looking up {0}. You attempted to look up '
|
||||
'an IPv6 address in an IPv4-only database.'.format(
|
||||
ip_address))
|
||||
pointer = self._find_address_in_tree(address)
|
||||
|
||||
return self._resolve_data_pointer(pointer) if pointer else None
|
||||
|
||||
def _find_address_in_tree(self, ip_address):
|
||||
packed = ip_address.packed
|
||||
|
||||
bit_count = len(packed) * 8
|
||||
node = self._start_node(bit_count)
|
||||
|
||||
for i in range(bit_count):
|
||||
if node >= self._metadata.node_count:
|
||||
break
|
||||
bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8))
|
||||
node = self._read_node(node, bit)
|
||||
if node == self._metadata.node_count:
|
||||
# Record is empty
|
||||
return 0
|
||||
elif node > self._metadata.node_count:
|
||||
return node
|
||||
|
||||
raise InvalidDatabaseError('Invalid node in search tree')
|
||||
|
||||
def _start_node(self, length):
|
||||
if self._metadata.ip_version != 6 or length == 128:
|
||||
return 0
|
||||
|
||||
# We are looking up an IPv4 address in an IPv6 tree. Skip over the
|
||||
# first 96 nodes.
|
||||
if self._ipv4_start:
|
||||
return self._ipv4_start
|
||||
|
||||
node = 0
|
||||
for _ in range(96):
|
||||
if node >= self._metadata.node_count:
|
||||
break
|
||||
node = self._read_node(node, 0)
|
||||
self._ipv4_start = node
|
||||
return node
|
||||
|
||||
def _read_node(self, node_number, index):
|
||||
base_offset = node_number * self._metadata.node_byte_size
|
||||
|
||||
record_size = self._metadata.record_size
|
||||
if record_size == 24:
|
||||
offset = base_offset + index * 3
|
||||
node_bytes = b'\x00' + self._buffer[offset:offset + 3]
|
||||
elif record_size == 28:
|
||||
(middle,) = struct.unpack(
|
||||
b'!B', self._buffer[base_offset + 3:base_offset + 4])
|
||||
if index:
|
||||
middle &= 0x0F
|
||||
else:
|
||||
middle = (0xF0 & middle) >> 4
|
||||
offset = base_offset + index * 4
|
||||
node_bytes = byte_from_int(
|
||||
middle) + self._buffer[offset:offset + 3]
|
||||
elif record_size == 32:
|
||||
offset = base_offset + index * 4
|
||||
node_bytes = self._buffer[offset:offset + 4]
|
||||
else:
|
||||
raise InvalidDatabaseError(
|
||||
'Unknown record size: {0}'.format(record_size))
|
||||
return struct.unpack(b'!I', node_bytes)[0]
|
||||
|
||||
def _resolve_data_pointer(self, pointer):
|
||||
resolved = pointer - self._metadata.node_count + \
|
||||
self._metadata.search_tree_size
|
||||
|
||||
if resolved > self._buffer_size:
|
||||
raise InvalidDatabaseError(
|
||||
"The MaxMind DB file's search tree is corrupt")
|
||||
|
||||
(data, _) = self._decoder.decode(resolved)
|
||||
return data
|
||||
|
||||
def close(self):
|
||||
"""Closes the MaxMind DB file and returns the resources to the system"""
|
||||
# pylint: disable=unidiomatic-typecheck
|
||||
if type(self._buffer) not in (str, bytes):
|
||||
self._buffer.close()
|
||||
|
||||
|
||||
class Metadata(object):
|
||||
|
||||
"""Metadata for the MaxMind DB reader"""
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, **kwargs):
|
||||
"""Creates new Metadata object. kwargs are key/value pairs from spec"""
|
||||
# Although I could just update __dict__, that is less obvious and it
|
||||
# doesn't work well with static analysis tools and some IDEs
|
||||
self.node_count = kwargs['node_count']
|
||||
self.record_size = kwargs['record_size']
|
||||
self.ip_version = kwargs['ip_version']
|
||||
self.database_type = kwargs['database_type']
|
||||
self.languages = kwargs['languages']
|
||||
self.binary_format_major_version = kwargs[
|
||||
'binary_format_major_version']
|
||||
self.binary_format_minor_version = kwargs[
|
||||
'binary_format_minor_version']
|
||||
self.build_epoch = kwargs['build_epoch']
|
||||
self.description = kwargs['description']
|
||||
|
||||
@property
|
||||
def node_byte_size(self):
|
||||
"""The size of a node in bytes"""
|
||||
return self.record_size // 4
|
||||
|
||||
@property
|
||||
def search_tree_size(self):
|
||||
"""The size of the search tree"""
|
||||
return self.node_count * self.node_byte_size
|
||||
|
||||
def __repr__(self):
|
||||
args = ', '.join('%s=%r' % x for x in self.__dict__.items())
|
||||
return '{module}.{class_name}({data})'.format(
|
||||
module=self.__module__,
|
||||
class_name=self.__class__.__name__,
|
||||
data=args)
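The pure Python Reader mirrors the C extension: the constructor picks an mmap, buffered-file, or in-memory buffer, locates the metadata marker near the end of the file, and get() walks the binary search tree before decoding the data section (with record_size 28, node_byte_size is 28 // 4 = 7 bytes and search_tree_size is node_count * 7). A hedged usage sketch; the database path is illustrative:

```python
# Minimal sketch of the pure Python reader defined above.
from maxminddb.const import MODE_FILE
from maxminddb.reader import Reader

reader = Reader('/path/to/GeoLite2-City.mmdb', mode=MODE_FILE)  # illustrative path
meta = reader.metadata()
print('ip_version=%s node_count=%s record_size=%s'
      % (meta.ip_version, meta.node_count, meta.record_size))
print(reader.get('8.8.8.8'))   # dict-like record, or None if the address is not found
reader.close()
```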
|
|
@ -18,6 +18,7 @@ import time
|
|||
|
||||
import plexpy
|
||||
import activity_processor
|
||||
import datafactory
|
||||
import helpers
|
||||
import logger
|
||||
import notification_handler
|
||||
|
@ -55,23 +56,46 @@ class ActivityHandler(object):
|
|||
|
||||
return None
|
||||
|
||||
def update_db_session(self):
|
||||
def update_db_session(self, session=None):
|
||||
# Update our session temp table values
|
||||
monitor_proc = activity_processor.ActivityProcessor()
|
||||
monitor_proc.write_session(session=self.get_live_session(), notify=False)
|
||||
monitor_proc.write_session(session=session, notify=False)
|
||||
|
||||
def on_start(self):
|
||||
if self.is_valid_session() and self.get_live_session():
|
||||
logger.debug(u"PlexPy ActivityHandler :: Session %s has started." % str(self.get_session_key()))
|
||||
|
||||
session = self.get_live_session()
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if any(d['on_play'] for d in notifiers.available_notification_agents()):
|
||||
# Fire off notifications
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=self.get_live_session(), notify_action='play')).start()
|
||||
kwargs=dict(stream_data=session, notify_action='play')).start()
|
||||
|
||||
# Write the new session to our temp session table
|
||||
self.update_db_session()
|
||||
self.update_db_session(session=session)
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if any(d['on_concurrent'] for d in notifiers.available_notification_agents()):
|
||||
# Check for any concurrent streams by the user
|
||||
ip = True if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP else None
|
||||
ap = activity_processor.ActivityProcessor()
|
||||
user_sessions = ap.get_session_by_user_id(user_id=session['user_id'], ip_address=ip)
|
||||
if len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD:
|
||||
# Push any notifications - Push it on its own thread so we don't hold up our db actions
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=session, notify_action='concurrent')).start()
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if any(d['on_newdevice'] for d in notifiers.available_notification_agents()):
|
||||
# Check if the user is streaming from a new device
|
||||
data_factory = datafactory.DataFactory()
|
||||
user_devices = data_factory.get_user_devices(user_id=session['user_id'])
|
||||
if session['machine_id'] not in user_devices:
|
||||
# Push any notifications - Push it on its own thread so we don't hold up our db actions
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=session, notify_action='newdevice')).start()
|
||||
|
||||
def on_stop(self, force_stop=False):
|
||||
if self.is_valid_session():
|
||||
|
|
|
@ -24,6 +24,7 @@ import libraries
|
|||
import logger
|
||||
import notification_handler
|
||||
import notifiers
|
||||
import plextv
|
||||
import pmsconnect
|
||||
|
||||
|
||||
|
@ -372,23 +373,19 @@ def check_server_updates():
|
|||
with monitor_lock:
|
||||
logger.info(u"PlexPy Monitor :: Checking for PMS updates...")
|
||||
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
plex_tv = plextv.PlexTV()
|
||||
download_info = plex_tv.get_plex_downloads()
|
||||
|
||||
server_identity = pms_connect.get_server_identity()
|
||||
update_status = pms_connect.get_update_staus()
|
||||
if download_info:
|
||||
logger.info(u"PlexPy Monitor :: Current PMS version: %s", plexpy.CONFIG.PMS_VERSION)
|
||||
|
||||
if server_identity and update_status:
|
||||
version = server_identity['version']
|
||||
logger.info(u"PlexPy Monitor :: Current PMS version: %s", version)
|
||||
|
||||
if update_status['state'] == 'available':
|
||||
update_version = update_status['version']
|
||||
logger.info(u"PlexPy Monitor :: PMS update available version: %s", update_version)
|
||||
if download_info['update_available']:
|
||||
logger.info(u"PlexPy Monitor :: PMS update available version: %s", download_info['version'])
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if any(d['on_pmsupdate'] for d in notifiers.available_notification_agents()):
|
||||
# Fire off notifications
|
||||
threading.Thread(target=notification_handler.notify_timeline,
|
||||
kwargs=dict(notify_action='pmsupdate')).start()
|
||||
kwargs=dict(notify_action='pmsupdate')).start()
|
||||
else:
|
||||
logger.info(u"PlexPy Monitor :: No PMS update available.")
|
|
@ -19,6 +19,7 @@ import re
|
|||
|
||||
import plexpy
|
||||
import database
|
||||
import datafactory
|
||||
import libraries
|
||||
import log_reader
|
||||
import logger
|
||||
|
@ -106,6 +107,26 @@ class ActivityProcessor(object):
|
|||
ip_address = {'ip_address': ip_address}
|
||||
self.db.upsert('sessions', ip_address, keys)
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if notify and any(d['on_concurrent'] for d in notifiers.available_notification_agents()):
|
||||
# Check for any concurrent streams by the user
|
||||
ip = True if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP else None
|
||||
user_sessions = self.get_session_by_user_id(user_id=session['user_id'], ip_address=ip)
|
||||
if len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD:
|
||||
# Push any notifications - Push it on its own thread so we don't hold up our db actions
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=values, notify_action='concurrent')).start()
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if notify and any(d['on_newdevice'] for d in notifiers.available_notification_agents()):
|
||||
# Check if the user is streaming from a new device
|
||||
data_factory = datafactory.DataFactory()
|
||||
user_devices = data_factory.get_user_devices(user_id=session['user_id'])
|
||||
if session['machine_id'] not in user_devices:
|
||||
# Push any notifications - Push it on its own thread so we don't hold up our db actions
|
||||
threading.Thread(target=notification_handler.notify,
|
||||
kwargs=dict(stream_data=values, notify_action='newdevice')).start()
|
||||
|
||||
return True
|
||||
|
||||
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
|
||||
|
@ -470,3 +491,13 @@ class ActivityProcessor(object):
|
|||
return last_time['buffer_last_triggered']
|
||||
|
||||
return None
|
||||
|
||||
def get_session_by_user_id(self, user_id=None, ip_address=None):
|
||||
sessions = []
|
||||
if str(user_id).isdigit():
|
||||
ip = 'GROUP BY ip_address' if ip_address else ''
|
||||
sessions = self.db.select('SELECT * '
|
||||
'FROM sessions '
|
||||
'WHERE user_id = ? %s' % ip,
|
||||
[user_id])
|
||||
return sessions
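get_session_by_user_id feeds the concurrent-stream notification: the handler counts the user's active sessions (optionally grouped by IP address) and compares the count against NOTIFY_CONCURRENT_THRESHOLD. A condensed sketch of that check, assuming it runs inside an initialized PlexPy process:

```python
# Sketch of the threshold check used in ActivityHandler.on_start and
# ActivityProcessor.write_session; names are the ones used above.
import plexpy
from plexpy import activity_processor

def user_has_concurrent_streams(user_id):
    ap = activity_processor.ActivityProcessor()
    # Group by IP address only when NOTIFY_CONCURRENT_BY_IP is enabled.
    ip = True if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP else None
    user_sessions = ap.get_session_by_user_id(user_id=user_id, ip_address=ip)
    return len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD
```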
|
|
@ -55,6 +55,11 @@ _CONFIG_DEFINITIONS = {
|
|||
'PMS_USE_BIF': (int, 'PMS', 0),
|
||||
'PMS_UUID': (str, 'PMS', ''),
|
||||
'PMS_TIMEOUT': (int, 'Advanced', 15),
|
||||
'PMS_PLEXPASS': (int, 'PMS', 0),
|
||||
'PMS_PLATFORM': (str, 'PMS', ''),
|
||||
'PMS_VERSION': (str, 'PMS', ''),
|
||||
'PMS_UPDATE_CHANNEL': (str, 'PMS', 'public'),
|
||||
'PMS_UPDATE_DISTRO_BUILD': (str, 'PMS', ''),
|
||||
'TIME_FORMAT': (str, 'General', 'HH:mm'),
|
||||
'ANON_REDIRECT': (str, 'General', 'http://dereferer.org/?'),
|
||||
'API_ENABLED': (int, 'General', 0),
|
||||
|
@ -75,6 +80,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'BOXCAR_ON_EXTUP': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_INTUP': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_PMSUPDATE': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_CONCURRENT': (int, 'Boxcar', 0),
|
||||
'BOXCAR_ON_NEWDEVICE': (int, 'Boxcar', 0),
|
||||
'BROWSER_ENABLED': (int, 'Boxcar', 0),
|
||||
'BROWSER_AUTO_HIDE_DELAY': (int, 'Boxcar', 5),
|
||||
'BROWSER_ON_PLAY': (int, 'BROWSER', 0),
|
||||
|
@ -89,6 +96,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'BROWSER_ON_EXTUP': (int, 'BROWSER', 0),
|
||||
'BROWSER_ON_INTUP': (int, 'BROWSER', 0),
|
||||
'BROWSER_ON_PMSUPDATE': (int, 'BROWSER', 0),
|
||||
'BROWSER_ON_CONCURRENT': (int, 'BROWSER', 0),
|
||||
'BROWSER_ON_NEWDEVICE': (int, 'BROWSER', 0),
|
||||
'BUFFER_THRESHOLD': (int, 'Monitoring', 3),
|
||||
'BUFFER_WAIT': (int, 'Monitoring', 900),
|
||||
'BACKUP_DIR': (str, 'General', ''),
|
||||
|
@ -125,6 +134,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'EMAIL_ON_EXTUP': (int, 'Email', 0),
|
||||
'EMAIL_ON_INTUP': (int, 'Email', 0),
|
||||
'EMAIL_ON_PMSUPDATE': (int, 'Email', 0),
|
||||
'EMAIL_ON_CONCURRENT': (int, 'Email', 0),
|
||||
'EMAIL_ON_NEWDEVICE': (int, 'Email', 0),
|
||||
'ENABLE_HTTPS': (int, 'General', 0),
|
||||
'FACEBOOK_ENABLED': (int, 'Facebook', 0),
|
||||
'FACEBOOK_REDIRECT_URI': (str, 'Facebook', ''),
|
||||
|
@ -147,8 +158,11 @@ _CONFIG_DEFINITIONS = {
|
|||
'FACEBOOK_ON_EXTUP': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_INTUP': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_PMSUPDATE': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_CONCURRENT': (int, 'Facebook', 0),
|
||||
'FACEBOOK_ON_NEWDEVICE': (int, 'Facebook', 0),
|
||||
'FIRST_RUN_COMPLETE': (int, 'General', 0),
|
||||
'FREEZE_DB': (int, 'General', 0),
|
||||
'GEOIP_DB': (str, 'General', ''),
|
||||
'GET_FILE_SIZES': (int, 'General', 0),
|
||||
'GET_FILE_SIZES_HOLD': (dict, 'General', {'section_ids': [], 'rating_keys': []}),
|
||||
'GIT_BRANCH': (str, 'General', 'master'),
|
||||
|
@ -174,6 +188,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'GROWL_ON_EXTUP': (int, 'Growl', 0),
|
||||
'GROWL_ON_INTUP': (int, 'Growl', 0),
|
||||
'GROWL_ON_PMSUPDATE': (int, 'Growl', 0),
|
||||
'GROWL_ON_CONCURRENT': (int, 'Growl', 0),
|
||||
'GROWL_ON_NEWDEVICE': (int, 'Growl', 0),
|
||||
'HOME_SECTIONS': (list, 'General', ['current_activity','watch_stats','library_stats','recently_added']),
|
||||
'HOME_LIBRARY_CARDS': (list, 'General', ['first_run']),
|
||||
'HOME_STATS_LENGTH': (int, 'General', 30),
|
||||
|
@ -196,6 +212,27 @@ _CONFIG_DEFINITIONS = {
|
|||
'HTTP_PROXY': (int, 'General', 0),
|
||||
'HTTP_ROOT': (str, 'General', ''),
|
||||
'HTTP_USERNAME': (str, 'General', ''),
|
||||
'HIPCHAT_URL': (str, 'Hipchat', ''),
|
||||
'HIPCHAT_COLOR': (str, 'Hipchat', ''),
|
||||
'HIPCHAT_INCL_SUBJECT': (int, 'Hipchat', 1),
|
||||
'HIPCHAT_INCL_PMSLINK': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_INCL_POSTER': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_EMOTICON': (str, 'Hipchat', ''),
|
||||
'HIPCHAT_ENABLED': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_PLAY': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_STOP': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_PAUSE': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_RESUME': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_BUFFER': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_WATCHED': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_CREATED': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_EXTDOWN': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_INTDOWN': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_EXTUP': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_INTUP': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_PMSUPDATE': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_CONCURRENT': (int, 'Hipchat', 0),
|
||||
'HIPCHAT_ON_NEWDEVICE': (int, 'Hipchat', 0),
|
||||
'INTERFACE': (str, 'General', 'default'),
|
||||
'IP_LOGGING_ENABLE': (int, 'General', 0),
|
||||
'IFTTT_KEY': (str, 'IFTTT', ''),
|
||||
|
@ -213,10 +250,13 @@ _CONFIG_DEFINITIONS = {
|
|||
'IFTTT_ON_EXTUP': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_INTUP': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_PMSUPDATE': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_CONCURRENT': (int, 'IFTTT', 0),
|
||||
'IFTTT_ON_NEWDEVICE': (int, 'IFTTT', 0),
|
||||
'IMGUR_CLIENT_ID': (str, 'Monitoring', ''),
|
||||
'JOIN_APIKEY': (str, 'Join', ''),
|
||||
'JOIN_DEVICEID': (str, 'Join', ''),
|
||||
'JOIN_ENABLED': (int, 'Join', 0),
|
||||
'JOIN_INCL_SUBJECT': (int, 'Join', 1),
|
||||
'JOIN_ON_PLAY': (int, 'Join', 0),
|
||||
'JOIN_ON_STOP': (int, 'Join', 0),
|
||||
'JOIN_ON_PAUSE': (int, 'Join', 0),
|
||||
|
@ -229,6 +269,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'JOIN_ON_EXTUP': (int, 'Join', 0),
|
||||
'JOIN_ON_INTUP': (int, 'Join', 0),
|
||||
'JOIN_ON_PMSUPDATE': (int, 'Join', 0),
|
||||
'JOIN_ON_CONCURRENT': (int, 'Join', 0),
|
||||
'JOIN_ON_NEWDEVICE': (int, 'Join', 0),
|
||||
'JOURNAL_MODE': (str, 'Advanced', 'wal'),
|
||||
'LAUNCH_BROWSER': (int, 'General', 1),
|
||||
'LOG_BLACKLIST': (int, 'General', 1),
|
||||
|
@ -263,11 +305,15 @@ _CONFIG_DEFINITIONS = {
|
|||
'NMA_ON_EXTUP': (int, 'NMA', 0),
|
||||
'NMA_ON_INTUP': (int, 'NMA', 0),
|
||||
'NMA_ON_PMSUPDATE': (int, 'NMA', 0),
|
||||
'NMA_ON_CONCURRENT': (int, 'NMA', 0),
|
||||
'NMA_ON_NEWDEVICE': (int, 'NMA', 0),
|
||||
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
|
||||
'NOTIFY_UPLOAD_POSTERS': (int, 'Monitoring', 0),
|
||||
'NOTIFY_RECENTLY_ADDED': (int, 'Monitoring', 0),
|
||||
'NOTIFY_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 0),
|
||||
'NOTIFY_RECENTLY_ADDED_DELAY': (int, 'Monitoring', 60),
|
||||
'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0),
|
||||
'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2),
|
||||
'NOTIFY_WATCHED_PERCENT': (int, 'Monitoring', 85),
|
||||
'NOTIFY_ON_START_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
|
||||
'NOTIFY_ON_START_BODY_TEXT': (unicode, 'Monitoring', '{user} ({player}) started playing {title}.'),
|
||||
|
@ -293,6 +339,10 @@ _CONFIG_DEFINITIONS = {
|
|||
'NOTIFY_ON_INTUP_BODY_TEXT': (unicode, 'Monitoring', 'The Plex Media Server is back up.'),
|
||||
'NOTIFY_ON_PMSUPDATE_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
|
||||
'NOTIFY_ON_PMSUPDATE_BODY_TEXT': (unicode, 'Monitoring', 'An update is available for the Plex Media Server (version {update_version}).'),
|
||||
'NOTIFY_ON_CONCURRENT_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
|
||||
'NOTIFY_ON_CONCURRENT_BODY_TEXT': (unicode, 'Monitoring', '{user} has {user_streams} concurrent streams.'),
|
||||
'NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
|
||||
'NOTIFY_ON_NEWDEVICE_BODY_TEXT': (unicode, 'Monitoring', '{user} is streaming from a new device: {player}.'),
|
||||
'NOTIFY_SCRIPTS_ARGS_TEXT': (unicode, 'Monitoring', ''),
|
||||
'OSX_NOTIFY_APP': (str, 'OSX_Notify', '/Applications/PlexPy'),
|
||||
'OSX_NOTIFY_ENABLED': (int, 'OSX_Notify', 0),
|
||||
|
@ -308,6 +358,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'OSX_NOTIFY_ON_EXTUP': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_INTUP': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_PMSUPDATE': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_CONCURRENT': (int, 'OSX_Notify', 0),
|
||||
'OSX_NOTIFY_ON_NEWDEVICE': (int, 'OSX_Notify', 0),
|
||||
'PLEX_CLIENT_HOST': (str, 'Plex', ''),
|
||||
'PLEX_ENABLED': (int, 'Plex', 0),
|
||||
'PLEX_PASSWORD': (str, 'Plex', ''),
|
||||
|
@ -324,6 +376,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'PLEX_ON_EXTUP': (int, 'Plex', 0),
|
||||
'PLEX_ON_INTUP': (int, 'Plex', 0),
|
||||
'PLEX_ON_PMSUPDATE': (int, 'Plex', 0),
|
||||
'PLEX_ON_CONCURRENT': (int, 'Plex', 0),
|
||||
'PLEX_ON_NEWDEVICE': (int, 'Plex', 0),
|
||||
'PROWL_ENABLED': (int, 'Prowl', 0),
|
||||
'PROWL_KEYS': (str, 'Prowl', ''),
|
||||
'PROWL_PRIORITY': (int, 'Prowl', 0),
|
||||
|
@ -339,6 +393,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'PROWL_ON_EXTUP': (int, 'Prowl', 0),
|
||||
'PROWL_ON_INTUP': (int, 'Prowl', 0),
|
||||
'PROWL_ON_PMSUPDATE': (int, 'Prowl', 0),
|
||||
'PROWL_ON_CONCURRENT': (int, 'Prowl', 0),
|
||||
'PROWL_ON_NEWDEVICE': (int, 'Prowl', 0),
|
||||
'PUSHALOT_APIKEY': (str, 'Pushalot', ''),
|
||||
'PUSHALOT_ENABLED': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_PLAY': (int, 'Pushalot', 0),
|
||||
|
@ -353,6 +409,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'PUSHALOT_ON_EXTUP': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_INTUP': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_PMSUPDATE': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_CONCURRENT': (int, 'Pushalot', 0),
|
||||
'PUSHALOT_ON_NEWDEVICE': (int, 'Pushalot', 0),
|
||||
'PUSHBULLET_APIKEY': (str, 'PushBullet', ''),
|
||||
'PUSHBULLET_DEVICEID': (str, 'PushBullet', ''),
|
||||
'PUSHBULLET_CHANNEL_TAG': (str, 'PushBullet', ''),
|
||||
|
@ -369,6 +427,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'PUSHBULLET_ON_EXTUP': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_INTUP': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_PMSUPDATE': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_CONCURRENT': (int, 'PushBullet', 0),
|
||||
'PUSHBULLET_ON_NEWDEVICE': (int, 'PushBullet', 0),
|
||||
'PUSHOVER_APITOKEN': (str, 'Pushover', ''),
|
||||
'PUSHOVER_ENABLED': (int, 'Pushover', 0),
|
||||
'PUSHOVER_HTML_SUPPORT': (int, 'Pushover', 1),
|
||||
|
@ -387,6 +447,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'PUSHOVER_ON_EXTUP': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_INTUP': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_PMSUPDATE': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_CONCURRENT': (int, 'Pushover', 0),
|
||||
'PUSHOVER_ON_NEWDEVICE': (int, 'Pushover', 0),
|
||||
'REFRESH_LIBRARIES_INTERVAL': (int, 'Monitoring', 12),
|
||||
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
|
||||
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
|
||||
|
@ -412,6 +474,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'SLACK_ON_EXTUP': (int, 'Slack', 0),
|
||||
'SLACK_ON_INTUP': (int, 'Slack', 0),
|
||||
'SLACK_ON_PMSUPDATE': (int, 'Slack', 0),
|
||||
'SLACK_ON_CONCURRENT': (int, 'Slack', 0),
|
||||
'SLACK_ON_NEWDEVICE': (int, 'Slack', 0),
|
||||
'SCRIPTS_ENABLED': (int, 'Scripts', 0),
|
||||
'SCRIPTS_FOLDER': (unicode, 'Scripts', ''),
|
||||
'SCRIPTS_ON_PLAY': (int, 'Scripts', 0),
|
||||
|
@ -426,6 +490,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'SCRIPTS_ON_INTDOWN': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_INTUP': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_PMSUPDATE': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_CONCURRENT': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_NEWDEVICE': (int, 'Scripts', 0),
|
||||
'SCRIPTS_ON_PLAY_SCRIPT': (unicode, 'Scripts', ''),
|
||||
'SCRIPTS_ON_STOP_SCRIPT': (unicode, 'Scripts', ''),
|
||||
'SCRIPTS_ON_PAUSE_SCRIPT': (unicode, 'Scripts', ''),
|
||||
|
@ -438,6 +504,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'SCRIPTS_ON_INTDOWN_SCRIPT': (unicode, 'Scripts', ''),
|
||||
'SCRIPTS_ON_INTUP_SCRIPT': (unicode, 'Scripts', ''),
|
||||
'SCRIPTS_ON_PMSUPDATE_SCRIPT': (unicode, 'Scripts', ''),
|
||||
'SCRIPTS_ON_CONCURRENT_SCRIPT': (unicode, 'Scripts', ''),
|
||||
'SCRIPTS_ON_NEWDEVICE_SCRIPT': (unicode, 'Scripts', ''),
|
||||
'TELEGRAM_BOT_TOKEN': (str, 'Telegram', ''),
|
||||
'TELEGRAM_ENABLED': (int, 'Telegram', 0),
|
||||
'TELEGRAM_CHAT_ID': (str, 'Telegram', ''),
|
||||
|
@ -456,6 +524,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'TELEGRAM_ON_EXTUP': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_INTUP': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_PMSUPDATE': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_CONCURRENT': (int, 'Telegram', 0),
|
||||
'TELEGRAM_ON_NEWDEVICE': (int, 'Telegram', 0),
|
||||
'TV_LOGGING_ENABLE': (int, 'Monitoring', 1),
|
||||
'TV_NOTIFY_ENABLE': (int, 'Monitoring', 0),
|
||||
'TV_NOTIFY_ON_START': (int, 'Monitoring', 1),
|
||||
|
@ -480,6 +550,8 @@ _CONFIG_DEFINITIONS = {
|
|||
'TWITTER_ON_EXTUP': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_INTUP': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_PMSUPDATE': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_CONCURRENT': (int, 'Twitter', 0),
|
||||
'TWITTER_ON_NEWDEVICE': (int, 'Twitter', 0),
|
||||
'UPDATE_DB_INTERVAL': (int, 'General', 24),
|
||||
'UPDATE_SECTION_IDS': (int, 'General', 1),
|
||||
'UPDATE_LABELS': (int, 'General', 1),
|
||||
|
@ -500,7 +572,9 @@ _CONFIG_DEFINITIONS = {
|
|||
'XBMC_ON_INTDOWN': (int, 'XBMC', 0),
|
||||
'XBMC_ON_EXTUP': (int, 'XBMC', 0),
|
||||
'XBMC_ON_INTUP': (int, 'XBMC', 0),
|
||||
'XBMC_ON_PMSUPDATE': (int, 'XBMC', 0)
|
||||
'XBMC_ON_PMSUPDATE': (int, 'XBMC', 0),
|
||||
'XBMC_ON_CONCURRENT': (int, 'XBMC', 0),
|
||||
'XBMC_ON_NEWDEVICE': (int, 'XBMC', 0)
|
||||
}
|
||||
|
||||
_BLACKLIST_KEYS = ['_APITOKEN', '_TOKEN', '_KEY', '_SECRET', '_PASSWORD', '_APIKEY', '_ID']
|
||||
|
@ -719,4 +793,8 @@ class Config(object):
|
|||
home_sections = self.HOME_SECTIONS
|
||||
home_sections.remove('library_stats')
|
||||
self.HOME_SECTIONS = home_sections
|
||||
self.CONFIG_VERSION = '5'
|
||||
self.CONFIG_VERSION = '5'
|
||||
|
||||
if self.CONFIG_VERSION == '5':
|
||||
self.MONITOR_PMS_UPDATES = 0
|
||||
self.CONFIG_VERSION = '6'
|
|
@ -69,8 +69,8 @@ class DataFactory(object):
|
|||
'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter',
|
||||
'session_history.user_id',
|
||||
'session_history.user',
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) \
|
||||
AS friendly_name',
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
|
||||
THEN users.username ELSE users.friendly_name END) AS friendly_name',
|
||||
'platform',
|
||||
'player',
|
||||
'ip_address',
|
||||
|
@ -1306,4 +1306,19 @@ class DataFactory(object):
|
|||
return True
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for delete_notification_log: %s." % e)
|
||||
return False
|
||||
return False
|
||||
|
||||
def get_user_devices(self, user_id=''):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
if user_id:
|
||||
try:
|
||||
query = 'SELECT machine_id FROM session_history WHERE user_id = ? GROUP BY machine_id'
|
||||
result = monitor_db.select(query=query, args=[user_id])
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_user_devices: %s." % e)
|
||||
return []
|
||||
else:
|
||||
return []
|
||||
|
||||
return [d['machine_id'] for d in result]
|
|
@ -463,7 +463,8 @@ class Graphs(object):
|
|||
if y_axis == 'plays':
|
||||
query = 'SELECT ' \
|
||||
'users.user_id, users.username, ' \
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
|
||||
' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
|
||||
'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \
|
||||
'SUM(CASE WHEN media_type = "movie" THEN 1 ELSE 0 END) AS movie_count, ' \
|
||||
'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count, ' \
|
||||
|
@ -479,7 +480,8 @@ class Graphs(object):
|
|||
else:
|
||||
query = 'SELECT ' \
|
||||
'users.user_id, users.username, ' \
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
|
||||
' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
|
||||
'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \
|
||||
' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \
|
||||
'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \
|
||||
|
@ -904,7 +906,8 @@ class Graphs(object):
|
|||
if y_axis == 'plays':
|
||||
query = 'SELECT ' \
|
||||
'users.user_id, users.username, ' \
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
|
||||
' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
|
||||
'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
|
||||
'THEN 1 ELSE 0 END) AS dp_count, ' \
|
||||
'SUM(CASE WHEN session_history_media_info.transcode_decision = "copy" ' \
|
||||
|
@ -925,7 +928,8 @@ class Graphs(object):
|
|||
else:
|
||||
query = 'SELECT ' \
|
||||
'users.user_id, users.username, ' \
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
|
||||
' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
|
||||
'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
|
||||
'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \
|
||||
' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \
|
||||
|
|
|
@ -16,11 +16,14 @@
|
|||
import base64
|
||||
import datetime
|
||||
from functools import wraps
|
||||
import geoip2.database, geoip2.errors
|
||||
import gzip
|
||||
import hashlib
|
||||
import imghdr
|
||||
from IPy import IP
|
||||
import json
|
||||
import math
|
||||
import maxminddb
|
||||
from operator import itemgetter
|
||||
import os
|
||||
import re
|
||||
|
@ -514,6 +517,118 @@ def get_ip(host):
|
|||
|
||||
return ip_address
|
||||
|
||||
def install_geoip_db():
    maxmind_url = 'http://geolite.maxmind.com/download/geoip/database/'
    geolite2_gz = 'GeoLite2-City.mmdb.gz'
    geolite2_md5 = 'GeoLite2-City.md5'
    geolite2_db = geolite2_gz[:-3]
    md5_checksum = ''

    temp_gz = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_gz)
    geolite2_db = plexpy.CONFIG.GEOIP_DB or os.path.join(plexpy.DATA_DIR, geolite2_db)

    # Retrieve the GeoLite2 gzip file
    logger.debug(u"PlexPy Helpers :: Downloading GeoLite2 gzip file from MaxMind...")
    try:
        maxmind = urllib.URLopener()
        maxmind.retrieve(maxmind_url + geolite2_gz, temp_gz)
        md5_checksum = urllib2.urlopen(maxmind_url + geolite2_md5).read()
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to download GeoLite2 gzip file from MaxMind: %s" % e)
        return False

    # Extract the GeoLite2 database file
    logger.debug(u"PlexPy Helpers :: Extracting GeoLite2 database...")
    try:
        with gzip.open(temp_gz, 'rb') as gz:
            with open(geolite2_db, 'wb') as db:
                db.write(gz.read())
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to extract the GeoLite2 database: %s" % e)
        return False

    # Check MD5 hash for GeoLite2 database file
    logger.debug(u"PlexPy Helpers :: Checking MD5 checksum for GeoLite2 database...")
    try:
        hash_md5 = hashlib.md5()
        with open(geolite2_db, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        md5_hash = hash_md5.hexdigest()

        if md5_hash != md5_checksum:
            logger.error(u"PlexPy Helpers :: MD5 checksum doesn't match for GeoLite2 database. "
                         "Checksum: %s, file hash: %s" % (md5_checksum, md5_hash))
            return False
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to generate MD5 checksum for GeoLite2 database: %s" % e)
        return False

    # Delete temporary GeoLite2 gzip file
    logger.debug(u"PlexPy Helpers :: Deleting temporary GeoLite2 gzip file...")
    try:
        os.remove(temp_gz)
    except Exception as e:
        logger.warn(u"PlexPy Helpers :: Failed to remove temporary GeoLite2 gzip file: %s" % e)

    logger.debug(u"PlexPy Helpers :: GeoLite2 database installed successfully.")
    plexpy.CONFIG.__setattr__('GEOIP_DB', geolite2_db)
    plexpy.CONFIG.write()

    return True

def uninstall_geoip_db():
    logger.debug(u"PlexPy Helpers :: Uninstalling the GeoLite2 database...")
    try:
        os.remove(plexpy.CONFIG.GEOIP_DB)
        plexpy.CONFIG.__setattr__('GEOIP_DB', '')
        plexpy.CONFIG.write()
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to uninstall the GeoLite2 database: %s" % e)
        return False

    logger.debug(u"PlexPy Helpers :: GeoLite2 database uninstalled successfully.")
    return True

def geoip_lookup(ip_address):
    if not plexpy.CONFIG.GEOIP_DB:
        return 'GeoLite2 database not installed. Please install from the ' \
            '<a href="settings?install_geoip=true">Settings</a> page.'

    if not ip_address:
        return 'No IP address provided.'

    try:
        reader = geoip2.database.Reader(plexpy.CONFIG.GEOIP_DB)
        geo = reader.city(ip_address)
        reader.close()
    except IOError as e:
        return 'Missing GeoLite2 database. Please reinstall from the ' \
            '<a href="settings?install_geoip=true">Settings</a> page.'
    except ValueError as e:
        return 'Unable to read GeoLite2 database. Please reinstall from the ' \
            '<a href="settings?reinstall_geoip=true">Settings</a> page.'
    except maxminddb.InvalidDatabaseError as e:
        return 'Invalid GeoLite2 database. Please reinstall from the ' \
            '<a href="settings?reinstall_geoip=true">Settings</a> page.'
    except geoip2.errors.AddressNotFoundError as e:
        return '%s' % e
    except Exception as e:
        return 'Error: %s' % e

    geo_info = {'continent': geo.continent.name,
                'country': geo.country.name,
                'region': geo.subdivisions.most_specific.name,
                'city': geo.city.name,
                'postal_code': geo.postal.code,
                'timezone': geo.location.time_zone,
                'latitude': geo.location.latitude,
                'longitude': geo.location.longitude,
                'accuracy': geo.location.accuracy_radius
                }

    return geo_info
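geoip_lookup wraps the geoip2 city lookup and deliberately returns a human-readable error string instead of raising, so callers only need to test for a dict. An illustrative call (the IP address and values are examples only), assuming an initialized PlexPy process with the GeoLite2 database installed:

```python
# Illustrative lookup against the installed GeoLite2 database.
from plexpy import helpers

result = helpers.geoip_lookup('8.8.8.8')
if isinstance(result, dict):
    # e.g. {'continent': 'North America', 'country': 'United States',
    #       'region': 'California', 'city': 'Mountain View', ...}
    print('%s, %s (+/- %s km)' % (result['city'], result['country'], result['accuracy']))
else:
    # On failure a human-readable error string is returned instead of a dict.
    print(result)
```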
|
||||
|
||||
# Taken from SickRage
|
||||
def anon_url(*url):
|
||||
"""
|
||||
|
|
|
@ -753,8 +753,9 @@ class Libraries(object):
|
|||
|
||||
try:
|
||||
if str(section_id).isdigit():
|
||||
query = 'SELECT (CASE WHEN users.friendly_name IS NULL THEN users.username ' \
|
||||
'ELSE users.friendly_name END) AS friendly_name, users.user_id, users.thumb, COUNT(user) AS user_count ' \
|
||||
query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
|
||||
'THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
|
||||
'users.user_id, users.thumb, COUNT(user) AS user_count ' \
|
||||
'FROM session_history ' \
|
||||
'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
|
||||
'JOIN users ON users.user_id = session_history.user_id ' \
|
||||
|
|
|
@ -90,14 +90,14 @@ class PublicIPFilter(logging.Filter):
|
|||
|
||||
try:
|
||||
# Currently only checking for ipv4 addresses
|
||||
ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', record.msg)
|
||||
ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})', record.msg)
|
||||
for ip in ipv4:
|
||||
if helpers.is_ip_public(ip):
|
||||
record.msg = record.msg.replace(ip, ip.partition('.')[0] + '.***.***.***')
|
||||
|
||||
args = []
|
||||
for arg in record.args:
|
||||
ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', arg) if isinstance(arg, basestring) else []
|
||||
ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})', arg) if isinstance(arg, basestring) else []
|
||||
for ip in ipv4:
|
||||
if helpers.is_ip_public(ip):
|
||||
arg = arg.replace(ip, ip.partition('.')[0] + '.***.***.***')
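The tightened regex adds a negative lookahead, presumably so dotted version strings followed by a build hash (for example a PMS version such as 1.0.0.2261-a17e99e) are no longer mistaken for IPv4 addresses and masked; genuine addresses still match. A quick check with illustrative log lines:

```python
# Quick check of the lookahead behaviour with illustrative log lines.
import re

pattern = r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})'

print(re.findall(pattern, 'Request from 93.184.216.34 accepted'))
# ['93.184.216.34']  -> masked further on if the address is public

print(re.findall(pattern, 'Current PMS version: 1.0.0.2261-a17e99e'))
# []  -> version-with-build-hash strings are skipped by the lookahead
```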
|
||||
|
|
|
@ -182,6 +182,46 @@ def notify(stream_data=None, notify_action=None):
|
|||
notify_strings=notify_strings,
|
||||
metadata=metadata)
|
||||
|
||||
elif agent['on_concurrent'] and notify_action == 'concurrent':
|
||||
# Build and send notification
|
||||
notify_strings, metadata = build_notify_text(session=stream_data,
|
||||
notify_action=notify_action,
|
||||
agent_id=agent['id'])
|
||||
|
||||
notifiers.send_notification(agent_id=agent['id'],
|
||||
subject=notify_strings[0],
|
||||
body=notify_strings[1],
|
||||
script_args=notify_strings[2],
|
||||
notify_action=notify_action,
|
||||
metadata=metadata)
|
||||
|
||||
# Set the notification state in the db
|
||||
set_notify_state(session=stream_data,
|
||||
notify_action=notify_action,
|
||||
agent_info=agent,
|
||||
notify_strings=notify_strings,
|
||||
metadata=metadata)
|
||||
|
||||
elif agent['on_newdevice'] and notify_action == 'newdevice':
|
||||
# Build and send notification
|
||||
notify_strings, metadata = build_notify_text(session=stream_data,
|
||||
notify_action=notify_action,
|
||||
agent_id=agent['id'])
|
||||
|
||||
notifiers.send_notification(agent_id=agent['id'],
|
||||
subject=notify_strings[0],
|
||||
body=notify_strings[1],
|
||||
script_args=notify_strings[2],
|
||||
notify_action=notify_action,
|
||||
metadata=metadata)
|
||||
|
||||
# Set the notification state in the db
|
||||
set_notify_state(session=stream_data,
|
||||
notify_action=notify_action,
|
||||
agent_info=agent,
|
||||
notify_strings=notify_strings,
|
||||
metadata=metadata)
|
||||
|
||||
elif (stream_data['media_type'] == 'track' and plexpy.CONFIG.MUSIC_NOTIFY_ENABLE):
|
||||
|
||||
for agent in notifiers.available_notification_agents():
|
||||
|
@ -285,6 +325,46 @@ def notify(stream_data=None, notify_action=None):
|
|||
notify_strings=notify_strings,
|
||||
metadata=metadata)
|
||||
|
||||
elif agent['on_concurrent'] and notify_action == 'concurrent':
|
||||
# Build and send notification
|
||||
notify_strings, metadata = build_notify_text(session=stream_data,
|
||||
notify_action=notify_action,
|
||||
agent_id=agent['id'])
|
||||
|
||||
notifiers.send_notification(agent_id=agent['id'],
|
||||
subject=notify_strings[0],
|
||||
body=notify_strings[1],
|
||||
script_args=notify_strings[2],
|
||||
notify_action=notify_action,
|
||||
metadata=metadata)
|
||||
|
||||
# Set the notification state in the db
|
||||
set_notify_state(session=stream_data,
|
||||
notify_action=notify_action,
|
||||
agent_info=agent,
|
||||
notify_strings=notify_strings,
|
||||
metadata=metadata)
|
||||
|
||||
elif agent['on_newdevice'] and notify_action == 'newdevice':
|
||||
# Build and send notification
|
||||
notify_strings, metadata = build_notify_text(session=stream_data,
|
||||
notify_action=notify_action,
|
||||
agent_id=agent['id'])
|
||||
|
||||
notifiers.send_notification(agent_id=agent['id'],
|
||||
subject=notify_strings[0],
|
||||
body=notify_strings[1],
|
||||
script_args=notify_strings[2],
|
||||
notify_action=notify_action,
|
||||
metadata=metadata)
|
||||
|
||||
# Set the notification state in the db
|
||||
set_notify_state(session=stream_data,
|
||||
notify_action=notify_action,
|
||||
agent_info=agent,
|
||||
notify_strings=notify_strings,
|
||||
metadata=metadata)
|
||||
|
||||
elif stream_data['media_type'] == 'clip':
|
||||
pass
|
||||
else:
|
||||
|
@ -485,7 +565,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
|
|||
pms_connect = pmsconnect.PmsConnect()
|
||||
metadata_list = pms_connect.get_metadata_details(rating_key=rating_key)
|
||||
|
||||
stream_count = pms_connect.get_current_activity().get('stream_count', '')
|
||||
current_activity = pms_connect.get_current_activity()
|
||||
sessions = current_activity.get('sessions', [])
|
||||
stream_count = current_activity.get('stream_count', '')
|
||||
user_stream_count = sum(1 for d in sessions if d['user_id'] == session['user_id']) if session else ''
|
||||
|
||||
if metadata_list:
|
||||
metadata = metadata_list['metadata']
|
||||
|
@ -525,6 +608,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
|
|||
on_watched_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT), agent_id)
|
||||
on_created_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CREATED_SUBJECT_TEXT), agent_id)
|
||||
on_created_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CREATED_BODY_TEXT), agent_id)
|
||||
on_concurrent_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT), agent_id)
|
||||
on_concurrent_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT), agent_id)
|
||||
on_newdevice_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT), agent_id)
|
||||
on_newdevice_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT), agent_id)
|
||||
script_args_text = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT), agent_id)
|
||||
else:
|
||||
on_start_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT, agent_id)
|
||||
|
@ -541,6 +628,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
|
|||
on_watched_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT, agent_id)
|
||||
on_created_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_CREATED_SUBJECT_TEXT, agent_id)
|
||||
on_created_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_CREATED_BODY_TEXT, agent_id)
|
||||
on_concurrent_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT, agent_id)
|
||||
on_concurrent_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT, agent_id)
|
||||
on_newdevice_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT, agent_id)
|
||||
on_newdevice_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT, agent_id)
|
||||
script_args_text = strip_tag(plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT, agent_id)
|
||||
|
||||
# Create a title
|
||||
|
@ -624,7 +715,7 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
|
|||
else:
|
||||
thumb = None
|
||||
|
||||
if thumb:
|
||||
if plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS and thumb:
|
||||
# Try to retrieve a poster_url from the database
|
||||
data_factory = datafactory.DataFactory()
|
||||
poster_url = data_factory.get_poster_url(rating_key=poster_key)
|
||||
|
@ -676,6 +767,7 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
|
|||
'timestamp': arrow.now().format(time_format),
|
||||
# Stream parameters
|
||||
'streams': stream_count,
|
||||
'user_streams': user_stream_count,
|
||||
'user': session.get('friendly_name',''),
|
||||
'username': session.get('user',''),
|
||||
'platform': session.get('platform',''),
|
||||
|
@ -940,6 +1032,52 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
|
|||
except:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
|
||||
|
||||
return [subject_text, body_text, script_args], metadata
|
||||
else:
|
||||
return [subject_text, body_text, script_args], metadata
|
||||
elif notify_action == 'concurrent':
|
||||
# Default body text
|
||||
body_text = '%s has %s concurrent streams.' % (session['friendly_name'],
|
||||
user_stream_count)
|
||||
|
||||
if on_concurrent_subject and on_concurrent_body:
|
||||
try:
|
||||
subject_text = unicode(on_concurrent_subject).format(**available_params)
|
||||
except LookupError as e:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e)
|
||||
except:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.")
|
||||
|
||||
try:
|
||||
body_text = unicode(on_concurrent_body).format(**available_params)
|
||||
except LookupError as e:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e)
|
||||
except:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
|
||||
|
||||
return [subject_text, body_text, script_args], metadata
|
||||
else:
|
||||
return [subject_text, body_text, script_args], metadata
|
||||
elif notify_action == 'newdevice':
|
||||
# Default body text
|
||||
body_text = '%s is streaming from a new device: %s.' % (session['friendly_name'],
|
||||
session['player'])
|
||||
|
||||
if on_newdevice_subject and on_newdevice_body:
|
||||
try:
|
||||
subject_text = unicode(on_newdevice_subject).format(**available_params)
|
||||
except LookupError as e:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e)
|
||||
except:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.")
|
||||
|
||||
try:
|
||||
body_text = unicode(on_newdevice_body).format(**available_params)
|
||||
except LookupError as e:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e)
|
||||
except:
|
||||
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
|
||||
|
||||
return [subject_text, body_text, script_args], metadata
|
||||
else:
|
||||
return [subject_text, body_text, script_args], metadata
|
||||
|
@ -961,8 +1099,7 @@ def build_server_notify_text(notify_action=None, agent_id=None):
|
|||
|
||||
update_status = {}
|
||||
if notify_action == 'pmsupdate':
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
update_status = pms_connect.get_update_staus()
|
||||
update_status = plex_tv.get_plex_downloads()
|
||||
|
||||
if server_times:
|
||||
updated_at = server_times['updated_at']
|
||||
|
@ -995,7 +1132,16 @@ def build_server_notify_text(notify_action=None, agent_id=None):
|
|||
# Update parameters
|
||||
'update_version': update_status.get('version',''),
|
||||
'update_url': update_status.get('download_url',''),
|
||||
'update_changelog': update_status.get('changelog','')}
|
||||
'update_release_date': arrow.get(update_status.get('release_date','')).format(date_format)
|
||||
if update_status.get('release_date','') else '',
|
||||
'update_channel': 'Plex Pass' if plexpy.CONFIG.PMS_UPDATE_CHANNEL == 'plexpass' else 'Public',
|
||||
'update_platform': update_status.get('platform',''),
|
||||
'update_distro': update_status.get('distro',''),
|
||||
'update_distro_build': update_status.get('build',''),
|
||||
'update_requirements': update_status.get('requirements',''),
|
||||
'update_extra_info': update_status.get('extra_info',''),
|
||||
'update_changelog_added': update_status.get('changelog_added',''),
|
||||
'update_changelog_fixed': update_status.get('changelog_fixed','')}
|
||||
|
||||
# Default text
|
||||
subject_text = 'PlexPy (%s)' % server_name
|
||||
|
@ -1146,10 +1292,10 @@ def strip_tag(data, agent_id=None):
|
|||
elif agent_id == 13:
|
||||
# Allow tags b, i, code, pre, a[href] for Telegram
|
||||
whitelist = {'b': [],
|
||||
'i': [],
|
||||
'code': [],
|
||||
'pre': [],
|
||||
'a': ['href']}
|
||||
'i': [],
|
||||
'code': [],
|
||||
'pre': [],
|
||||
'a': ['href']}
|
||||
return bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)
|
||||
|
||||
else:
|
||||
|
|
|
@ -31,6 +31,7 @@ import urllib
|
|||
from urllib import urlencode
|
||||
import urllib2
|
||||
from urlparse import urlparse
|
||||
import uuid
|
||||
|
||||
import gntp.notifier
|
||||
import facebook
|
||||
|
@ -62,7 +63,8 @@ AGENT_IDS = {"Growl": 0,
|
|||
"Scripts": 15,
|
||||
"Facebook": 16,
|
||||
"Browser": 17,
|
||||
"Join": 18}
|
||||
"Join": 18,
|
||||
"Hipchat": 19}
|
||||
|
||||
|
||||
def available_notification_agents():
|
||||
|
@ -82,7 +84,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.GROWL_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.GROWL_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.GROWL_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.GROWL_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.GROWL_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.GROWL_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.GROWL_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Prowl',
|
||||
'id': AGENT_IDS['Prowl'],
|
||||
|
@ -100,7 +104,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.PROWL_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.PROWL_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.PROWL_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.PROWL_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.PROWL_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.PROWL_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.PROWL_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'XBMC',
|
||||
'id': AGENT_IDS['XBMC'],
|
||||
|
@ -118,7 +124,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.XBMC_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.XBMC_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.XBMC_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.XBMC_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.XBMC_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.XBMC_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.XBMC_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Plex Home Theater',
|
||||
'id': AGENT_IDS['Plex'],
|
||||
|
@ -136,7 +144,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.PLEX_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.PLEX_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.PLEX_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.PLEX_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.PLEX_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.PLEX_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.PLEX_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'NotifyMyAndroid',
|
||||
'id': AGENT_IDS['NMA'],
|
||||
|
@ -154,7 +164,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.NMA_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.NMA_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.NMA_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.NMA_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.NMA_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.NMA_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.NMA_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Pushalot',
|
||||
'id': AGENT_IDS['Pushalot'],
|
||||
|
@ -172,7 +184,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.PUSHALOT_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.PUSHALOT_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.PUSHALOT_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.PUSHALOT_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.PUSHALOT_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.PUSHALOT_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.PUSHALOT_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Pushbullet',
|
||||
'id': AGENT_IDS['Pushbullet'],
|
||||
|
@ -190,7 +204,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.PUSHBULLET_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.PUSHBULLET_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.PUSHBULLET_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.PUSHBULLET_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.PUSHBULLET_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.PUSHBULLET_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.PUSHBULLET_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Pushover',
|
||||
'id': AGENT_IDS['Pushover'],
|
||||
|
@ -208,7 +224,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.PUSHOVER_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.PUSHOVER_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.PUSHOVER_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.PUSHOVER_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.PUSHOVER_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.PUSHOVER_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.PUSHOVER_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Boxcar2',
|
||||
'id': AGENT_IDS['Boxcar2'],
|
||||
|
@ -226,7 +244,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.BOXCAR_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.BOXCAR_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.BOXCAR_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.BOXCAR_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.BOXCAR_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.BOXCAR_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.BOXCAR_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'E-mail',
|
||||
'id': AGENT_IDS['Email'],
|
||||
|
@ -244,7 +264,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.EMAIL_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.EMAIL_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.EMAIL_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.EMAIL_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.EMAIL_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.EMAIL_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.EMAIL_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Twitter',
|
||||
'id': AGENT_IDS['Twitter'],
|
||||
|
@ -262,7 +284,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.TWITTER_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.TWITTER_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.TWITTER_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.TWITTER_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.TWITTER_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.TWITTER_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.TWITTER_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'IFTTT',
|
||||
'id': AGENT_IDS['IFTTT'],
|
||||
|
@ -280,7 +304,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.IFTTT_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.IFTTT_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.IFTTT_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.IFTTT_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.IFTTT_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.IFTTT_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.IFTTT_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Telegram',
|
||||
'id': AGENT_IDS['Telegram'],
|
||||
|
@ -298,7 +324,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.TELEGRAM_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.TELEGRAM_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.TELEGRAM_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.TELEGRAM_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.TELEGRAM_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.TELEGRAM_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.TELEGRAM_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Slack',
|
||||
'id': AGENT_IDS['Slack'],
|
||||
|
@ -316,7 +344,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.SLACK_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.SLACK_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.SLACK_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.SLACK_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.SLACK_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.SLACK_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.SLACK_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Scripts',
|
||||
'id': AGENT_IDS['Scripts'],
|
||||
|
@ -334,7 +364,9 @@ def available_notification_agents():
|
|||
'on_extup': plexpy.CONFIG.SCRIPTS_ON_EXTUP,
|
||||
'on_intdown': plexpy.CONFIG.SCRIPTS_ON_INTDOWN,
|
||||
'on_intup': plexpy.CONFIG.SCRIPTS_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.SCRIPTS_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Facebook',
|
||||
'id': AGENT_IDS['Facebook'],
|
||||
|
@ -352,7 +384,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.FACEBOOK_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.FACEBOOK_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.FACEBOOK_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.FACEBOOK_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.FACEBOOK_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.FACEBOOK_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.FACEBOOK_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Browser',
|
||||
'id': AGENT_IDS['Browser'],
|
||||
|
@ -370,7 +404,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.BROWSER_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.BROWSER_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.BROWSER_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.BROWSER_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.BROWSER_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.BROWSER_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.BROWSER_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Join',
|
||||
'id': AGENT_IDS['Join'],
|
||||
|
@ -388,7 +424,29 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.JOIN_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.JOIN_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.JOIN_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.JOIN_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.JOIN_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.JOIN_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.JOIN_ON_NEWDEVICE
|
||||
},
|
||||
{'name': 'Hipchat',
|
||||
'id': AGENT_IDS['Hipchat'],
|
||||
'config_prefix': 'hipchat',
|
||||
'has_config': True,
|
||||
'state': checked(plexpy.CONFIG.HIPCHAT_ENABLED),
|
||||
'on_play': plexpy.CONFIG.HIPCHAT_ON_PLAY,
|
||||
'on_stop': plexpy.CONFIG.HIPCHAT_ON_STOP,
|
||||
'on_pause': plexpy.CONFIG.HIPCHAT_ON_PAUSE,
|
||||
'on_resume': plexpy.CONFIG.HIPCHAT_ON_RESUME,
|
||||
'on_buffer': plexpy.CONFIG.HIPCHAT_ON_BUFFER,
|
||||
'on_watched': plexpy.CONFIG.HIPCHAT_ON_WATCHED,
|
||||
'on_created': plexpy.CONFIG.HIPCHAT_ON_CREATED,
|
||||
'on_extdown': plexpy.CONFIG.HIPCHAT_ON_EXTDOWN,
|
||||
'on_intdown': plexpy.CONFIG.HIPCHAT_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.HIPCHAT_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.HIPCHAT_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.HIPCHAT_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.HIPCHAT_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.HIPCHAT_ON_NEWDEVICE
|
||||
}
|
||||
]
|
||||
|
||||
|
@ -411,7 +469,9 @@ def available_notification_agents():
|
|||
'on_intdown': plexpy.CONFIG.OSX_NOTIFY_ON_INTDOWN,
|
||||
'on_extup': plexpy.CONFIG.OSX_NOTIFY_ON_EXTUP,
|
||||
'on_intup': plexpy.CONFIG.OSX_NOTIFY_ON_INTUP,
|
||||
'on_pmsupdate': plexpy.CONFIG.OSX_NOTIFY_ON_PMSUPDATE
|
||||
'on_pmsupdate': plexpy.CONFIG.OSX_NOTIFY_ON_PMSUPDATE,
|
||||
'on_concurrent': plexpy.CONFIG.OSX_NOTIFY_ON_CONCURRENT,
|
||||
'on_newdevice': plexpy.CONFIG.OSX_NOTIFY_ON_NEWDEVICE
|
||||
})
|
||||
|
||||
return agents
@@ -478,6 +538,9 @@ def get_notification_agent_config(agent_id):
        elif agent_id == 18:
            join = JOIN()
            return join.return_config_options()
        elif agent_id == 19:
            hipchat = HIPCHAT()
            return hipchat.return_config_options()
        else:
            return []
    else:

@@ -545,11 +608,80 @@ def send_notification(agent_id, subject, body, notify_action, **kwargs):
        elif agent_id == 18:
            join = JOIN()
            return join.notify(message=body, subject=subject)
        elif agent_id == 19:
            hipchat = HIPCHAT()
            return hipchat.notify(message=body, subject=subject, **kwargs)
        else:
            logger.debug(u"PlexPy Notifiers :: Unknown agent id received.")
    else:
        logger.debug(u"PlexPy Notifiers :: Notification requested but no agent id received.")

class PrettyMetadata(object):
|
||||
def __init__(self, metadata):
|
||||
self.metadata = metadata
|
||||
self.media_type = metadata['media_type']
|
||||
|
||||
def get_poster_url(self):
|
||||
self.poster_url = self.metadata.get('poster_url','')
|
||||
if not self.poster_url:
|
||||
if self.metadata['media_type'] in ['artist', 'track']:
|
||||
self.poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/cover.png'
|
||||
else:
|
||||
self.poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
|
||||
return self.poster_url
|
||||
|
||||
def get_poster_link(self):
|
||||
self.poster_link = ''
|
||||
if self.metadata.get('thetvdb_url',''):
|
||||
self.poster_link = self.metadata.get('thetvdb_url', '')
|
||||
elif self.metadata.get('themoviedb_url',''):
|
||||
self.poster_link = self.metadata.get('themoviedb_url', '')
|
||||
elif self.metadata.get('imdb_url',''):
|
||||
self.poster_link = self.metadata.get('imdb_url', '')
|
||||
elif self.metadata.get('lastfm_url',''):
|
||||
self.poster_link = self.metadata.get('lastfm_url', '')
|
||||
return self.poster_link
|
||||
|
||||
def get_caption(self):
|
||||
self.caption = ''
|
||||
if self.metadata.get('thetvdb_url',''):
|
||||
self.caption = 'View on TheTVDB'
|
||||
elif self.metadata.get('themoviedb_url',''):
|
||||
self.caption = 'View on The Movie Database'
|
||||
elif self.metadata.get('imdb_url',''):
|
||||
self.caption = 'View on IMDB'
|
||||
elif self.metadata.get('lastfm_url',''):
|
||||
self.caption = 'View on Last.fm'
|
||||
return self.caption
|
||||
|
||||
def get_title(self, divider = '-'):
|
||||
self.title = None
|
||||
if self.media_type == 'movie':
|
||||
self.title = '%s (%s)' % (self.metadata['title'], self.metadata['year'])
|
||||
elif self.media_type == 'show':
|
||||
self.title = '%s (%s)' % (self.metadata['title'], self.metadata['year'])
|
||||
elif self.media_type == 'artist':
|
||||
self.title = self.metadata['title']
|
||||
elif self.media_type == 'track':
|
||||
self.title = '%s - %s' % (self.metadata['grandparent_title'], self.metadata['title'])
|
||||
elif self.media_type == 'episode':
|
||||
self.title = '%s - %s (S%s %s E%s)' % (self.metadata['grandparent_title'],
|
||||
self.metadata['title'],
|
||||
self.metadata['parent_media_index'],
|
||||
divider,
|
||||
self.metadata['media_index'])
|
||||
return self.title.encode("utf-8")
|
||||
|
||||
def get_subtitle(self):
|
||||
if self.media_type == 'track':
|
||||
self.subtitle = self.metadata['parent_title']
|
||||
else:
|
||||
self.subtitle = self.metadata['summary']
|
||||
return self.subtitle.encode("utf-8")
|
||||
|
||||
def get_plex_url(self):
|
||||
self.plex_url = self.metadata['plex_url']
|
||||
return self.plex_url
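
The new PrettyMetadata helper above centralises the poster, title, and caption formatting that the Slack, Facebook, and Hipchat agents share. A minimal usage sketch follows; the metadata dict here is a made-up example, while in PlexPy the dict comes from the PMS metadata lookup:

# Hypothetical metadata dict; real values come from PlexPy's pmsconnect metadata lookup.
metadata = {'media_type': 'episode',
            'grandparent_title': 'Game of Thrones',
            'title': 'The Red Woman',
            'parent_media_index': '6',
            'media_index': '1',
            'summary': 'Jon Snow is dead.',
            'thetvdb_url': 'https://thetvdb.com/?tab=series&id=121361',
            'poster_url': '',
            'plex_url': 'http://localhost:32400/web'}

pretty_metadata = PrettyMetadata(metadata)
pretty_metadata.get_title(divider='-')   # 'Game of Thrones - The Red Woman (S6 - E1)'
pretty_metadata.get_caption()            # 'View on TheTVDB'
pretty_metadata.get_poster_url()         # empty poster_url falls back to the default poster image
pretty_metadata.get_poster_link()        # the TheTVDB URL above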
|
||||
|
||||
class GROWL(object):
|
||||
"""
|
||||
|
@ -1264,7 +1396,7 @@ class TwitterNotifier(object):
|
|||
poster_url = metadata.get('poster_url','')
|
||||
|
||||
if self.incl_subject:
|
||||
self._send_tweet(subject + ': ' + message, attachment=poster_url)
|
||||
self._send_tweet(subject + '\r\n' + message, attachment=poster_url)
|
||||
else:
|
||||
self._send_tweet(message, attachment=poster_url)
|
||||
|
||||
|
@ -1721,18 +1853,21 @@ class TELEGRAM(object):
|
|||
data = {'chat_id': self.chat_id}
|
||||
|
||||
if self.incl_subject:
|
||||
text = event.encode('utf-8') + ': ' + message.encode('utf-8')
|
||||
text = event.encode('utf-8') + '\r\n' + message.encode('utf-8')
|
||||
else:
|
||||
text = message.encode('utf-8')
|
||||
|
||||
if self.incl_poster and 'metadata' in kwargs:
|
||||
poster_data = {'chat_id': self.chat_id,
|
||||
'disable_notification': True}
|
||||
|
||||
metadata = kwargs['metadata']
|
||||
poster_url = metadata.get('poster_url','')
|
||||
|
||||
if poster_url:
|
||||
files = {'photo': (poster_url, urllib.urlopen(poster_url).read())}
|
||||
response = requests.post('https://api.telegram.org/bot%s/%s' % (self.bot_token, 'sendPhoto'),
|
||||
data=data,
|
||||
data=poster_data,
|
||||
files=files)
|
||||
request_status = response.status_code
|
||||
request_content = json.loads(response.text)
|
||||
|
@ -1840,7 +1975,7 @@ class SLACK(object):
|
|||
return
|
||||
|
||||
if self.incl_subject:
|
||||
text = event.encode('utf-8') + ': ' + message.encode("utf-8")
|
||||
text = event.encode('utf-8') + '\r\n' + message.encode("utf-8")
|
||||
else:
|
||||
text = message.encode("utf-8")
|
||||
|
||||
|
@ -1854,81 +1989,42 @@ class SLACK(object):
|
|||
data['icon_url'] = self.icon_emoji
|
||||
|
||||
if self.incl_poster and 'metadata' in kwargs:
|
||||
attachment = {}
|
||||
metadata = kwargs['metadata']
|
||||
poster_url = metadata.get('poster_url','')
|
||||
poster_link = ''
|
||||
caption = ''
|
||||
# Grab formatted metadata
|
||||
pretty_metadata = PrettyMetadata(kwargs['metadata'])
|
||||
poster_url = pretty_metadata.get_poster_url()
|
||||
plex_url = pretty_metadata.get_plex_url()
|
||||
poster_link = pretty_metadata.get_poster_link()
|
||||
caption = pretty_metadata.get_caption()
|
||||
title = pretty_metadata.get_title()
|
||||
subtitle = pretty_metadata.get_subtitle()
|
||||
|
||||
# Use default posters if no poster_url
|
||||
if not poster_url:
|
||||
if metadata['media_type'] in ['artist', 'track']:
|
||||
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/cover.png'
|
||||
else:
|
||||
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
|
||||
# Build Slack post attachment
|
||||
attachment = {'fallback': 'Image for %s' % title,
|
||||
'title': title,
|
||||
'text': subtitle,
|
||||
'image_url': poster_url,
|
||||
'thumb_url': poster_url
|
||||
}
|
||||
|
||||
if metadata['media_type'] == 'movie':
|
||||
title = '%s (%s)' % (metadata['title'], metadata['year'])
|
||||
if metadata.get('imdb_url',''):
|
||||
poster_link = metadata.get('imdb_url', '')
|
||||
caption = 'View on IMDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'show':
|
||||
title = '%s (%s)' % (metadata['title'], metadata['year'])
|
||||
if metadata.get('thetvdb_url',''):
|
||||
poster_link = metadata.get('thetvdb_url', '')
|
||||
caption = 'View on TheTVDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'episode':
|
||||
title = '%s - %s (S%s - E%s)' % (metadata['grandparent_title'],
|
||||
metadata['title'],
|
||||
metadata['parent_media_index'],
|
||||
metadata['media_index'])
|
||||
if metadata.get('thetvdb_url',''):
|
||||
poster_link = metadata.get('thetvdb_url', '')
|
||||
caption = 'View on TheTVDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'artist':
|
||||
title = metadata['title']
|
||||
if metadata.get('lastfm_url',''):
|
||||
poster_link = metadata.get('lastfm_url', '')
|
||||
caption = 'View on Last.fm'
|
||||
|
||||
elif metadata['media_type'] == 'track':
|
||||
title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
|
||||
if metadata.get('lastfm_url',''):
|
||||
poster_link = metadata.get('lastfm_url', '')
|
||||
caption = 'View on Last.fm'
|
||||
|
||||
# Build Facebook post attachment
|
||||
if self.incl_pmslink:
|
||||
caption = 'View on Plex Web'
|
||||
attachment['title_link'] = metadata['plex_url']
|
||||
attachment['text'] = caption
|
||||
elif poster_link:
|
||||
fields = []
|
||||
if poster_link:
|
||||
attachment['title_link'] = poster_link
|
||||
attachment['text'] = caption
|
||||
|
||||
attachment['fallback'] = 'Image for %s' % title
|
||||
attachment['title'] = title
|
||||
attachment['image_url'] = poster_url
|
||||
fields.append({'value': '<%s|%s>' % (poster_link, caption),
|
||||
'short': True})
|
||||
if self.incl_pmslink:
|
||||
fields.append({'value': '<%s|%s>' % (plex_url, 'View on Plex Web'),
|
||||
'short': True})
|
||||
if fields:
|
||||
attachment['fields'] = fields
|
||||
|
||||
data['attachments'] = [attachment]
|
||||
|
||||
url = urlparse(self.slack_hook).path
|
||||
slackhost = urlparse(self.slack_hook).hostname
|
||||
slackpath = urlparse(self.slack_hook).path
|
||||
|
||||
http_handler = HTTPSConnection("hooks.slack.com")
|
||||
http_handler = HTTPSConnection(slackhost)
|
||||
http_handler.request("POST",
|
||||
url,
|
||||
slackpath,
|
||||
headers={'Content-type': "application/x-www-form-urlencoded"},
|
||||
body=json.dumps(data))
|
||||
|
||||
|
@ -2090,6 +2186,12 @@ class Scripts(object):
|
|||
elif notify_action == 'pmsupdate':
|
||||
script = plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE_SCRIPT
|
||||
|
||||
elif notify_action == 'concurrent':
|
||||
script = plexpy.CONFIG.SCRIPTS_ON_CONCURRENT_SCRIPT
|
||||
|
||||
elif notify_action == 'newdevice':
|
||||
script = plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE_SCRIPT
|
||||
|
||||
else:
|
||||
# For manual scripts
|
||||
script = kwargs.get('script', '')
|
||||
|
@ -2266,6 +2368,20 @@ class Scripts(object):
|
|||
'description': 'Choose the script for Plex update available.',
|
||||
'input_type': 'select',
|
||||
'select_options': self.list_scripts()
|
||||
},
|
||||
{'label': 'User Concurrent Streams',
|
||||
'value': plexpy.CONFIG.SCRIPTS_ON_CONCURRENT_SCRIPT,
|
||||
'name': 'scripts_on_concurrent_script',
|
||||
'description': 'Choose the script for user concurrent streams.',
|
||||
'input_type': 'select',
|
||||
'select_options': self.list_scripts()
|
||||
},
|
||||
{'label': 'User New Device',
|
||||
'value': plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE_SCRIPT,
|
||||
'name': 'scripts_on_newdevice_script',
|
||||
'description': 'Choose the script for user new device.',
|
||||
'input_type': 'select',
|
||||
'select_options': self.list_scripts()
|
||||
}
|
||||
]
|
||||
|
||||
|
@ -2291,71 +2407,19 @@ class FacebookNotifier(object):
|
|||
attachment = {}
|
||||
|
||||
if self.incl_poster and 'metadata' in kwargs:
|
||||
metadata = kwargs['metadata']
|
||||
poster_url = metadata.get('poster_url','')
|
||||
poster_link = ''
|
||||
caption = ''
|
||||
|
||||
# Use default posters if no poster_url
|
||||
if not poster_url:
|
||||
if metadata['media_type'] in ['artist', 'track']:
|
||||
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/cover.png'
|
||||
else:
|
||||
poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
|
||||
|
||||
if metadata['media_type'] == 'movie':
|
||||
title = '%s (%s)' % (metadata['title'], metadata['year'])
|
||||
subtitle = metadata['summary']
|
||||
if metadata.get('imdb_url',''):
|
||||
poster_link = metadata.get('imdb_url', '')
|
||||
caption = 'View on IMDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'show':
|
||||
title = '%s (%s)' % (metadata['title'], metadata['year'])
|
||||
subtitle = metadata['summary']
|
||||
if metadata.get('thetvdb_url',''):
|
||||
poster_link = metadata.get('thetvdb_url', '')
|
||||
caption = 'View on TheTVDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'episode':
|
||||
title = '%s - %s (S%s %s E%s)' % (metadata['grandparent_title'],
|
||||
metadata['title'],
|
||||
metadata['parent_media_index'],
|
||||
'\xc2\xb7'.decode('utf8'),
|
||||
metadata['media_index'])
|
||||
subtitle = metadata['summary']
|
||||
if metadata.get('thetvdb_url',''):
|
||||
poster_link = metadata.get('thetvdb_url', '')
|
||||
caption = 'View on TheTVDB'
|
||||
elif metadata.get('themoviedb_url',''):
|
||||
poster_link = metadata.get('themoviedb_url', '')
|
||||
caption = 'View on The Movie Database'
|
||||
|
||||
elif metadata['media_type'] == 'artist':
|
||||
title = metadata['title']
|
||||
subtitle = metadata['summary']
|
||||
if metadata.get('lastfm_url',''):
|
||||
poster_link = metadata.get('lastfm_url', '')
|
||||
caption = 'View on Last.fm'
|
||||
|
||||
elif metadata['media_type'] == 'track':
|
||||
title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
|
||||
subtitle = metadata['parent_title']
|
||||
if metadata.get('lastfm_url',''):
|
||||
poster_link = metadata.get('lastfm_url', '')
|
||||
caption = 'View on Last.fm'
|
||||
# Grab formatted metadata
|
||||
pretty_metadata = PrettyMetadata(kwargs['metadata'])
|
||||
poster_url = pretty_metadata.get_poster_url()
|
||||
plex_url = pretty_metadata.get_plex_url()
|
||||
poster_link = pretty_metadata.get_poster_link()
|
||||
caption = pretty_metadata.get_caption()
|
||||
title = pretty_metadata.get_title('\xc2\xb7'.decode('utf8'))
|
||||
subtitle = pretty_metadata.get_subtitle()
|
||||
|
||||
# Build Facebook post attachment
|
||||
if self.incl_pmslink:
|
||||
caption = 'View on Plex Web'
|
||||
attachment['link'] = metadata['plex_url']
|
||||
attachment['caption'] = caption
|
||||
attachment['link'] = plex_url
|
||||
attachment['caption'] = 'View on Plex Web'
|
||||
elif poster_link:
|
||||
attachment['link'] = poster_link
|
||||
attachment['caption'] = caption
|
||||
|
@ -2367,7 +2431,7 @@ class FacebookNotifier(object):
|
|||
attachment['description'] = subtitle
|
||||
|
||||
if self.incl_subject:
|
||||
self._post_facebook(subject + ': ' + message, attachment=attachment)
|
||||
self._post_facebook(subject + '\r\n' + message, attachment=attachment)
|
||||
else:
|
||||
self._post_facebook(message, attachment=attachment)
|
||||
|
||||
|
@ -2425,13 +2489,14 @@ class FacebookNotifier(object):
|
|||
config_option = [{'label': 'Instructions',
|
||||
'description': 'Step 1: Visit <a href="' + helpers.anon_url('https://developers.facebook.com/apps') + '" target="_blank"> \
|
||||
Facebook Developers</a> to add a new app using <strong>basic setup</strong>.<br>\
|
||||
Step 2: Go to <strong>Settings > Advanced</strong> and fill in \
|
||||
<strong>Valid OAuth redirect URIs</strong> with your PlexPy URL (e.g. http://localhost:8181).<br>\
|
||||
Step 3: Go to <strong>App Review</strong> and toggle public to <strong>Yes</strong>.<br>\
|
||||
Step 4: Fill in the <strong>PlexPy URL</strong> below with the exact same URL from Step 3.<br>\
|
||||
Step 5: Fill in the <strong>App ID</strong> and <strong>App Secret</strong> below.<br>\
|
||||
Step 6: Click the <strong>Request Authorization</strong> button below.<br>\
|
||||
Step 7: Fill in your <strong>Group ID</strong> below.',
|
||||
Step 2: Click <strong>Add Product</strong> on the left, then <strong>Get Started</strong> \
|
||||
for <strong>Facebook Login</strong>.<br>\
|
||||
Step 3: Fill in <strong>Valid OAuth redirect URIs</strong> with your PlexPy URL (e.g. http://localhost:8181).<br>\
|
||||
Step 4: Click <strong>App Review</strong> on the left and toggle "make public" to <strong>Yes</strong>.<br>\
|
||||
Step 5: Fill in the <strong>PlexPy URL</strong> below with the exact same URL from Step 3.<br>\
|
||||
Step 6: Fill in the <strong>App ID</strong> and <strong>App Secret</strong> below.<br>\
|
||||
Step 7: Click the <strong>Request Authorization</strong> button below.<br>\
|
||||
Step 8: Fill in your <strong>Group ID</strong> below.',
|
||||
'input_type': 'help'
|
||||
},
|
||||
{'label': 'PlexPy URL',
|
||||
|
@ -2554,6 +2619,7 @@ class JOIN(object):
|
|||
def __init__(self):
|
||||
self.apikey = plexpy.CONFIG.JOIN_APIKEY
|
||||
self.deviceid = plexpy.CONFIG.JOIN_DEVICEID
|
||||
self.incl_subject = plexpy.CONFIG.JOIN_INCL_SUBJECT
|
||||
|
||||
def conf(self, options):
|
||||
return cherrypy.config['config'].get('PUSHBULLET', options)
|
||||
|
@ -2566,9 +2632,11 @@ class JOIN(object):
|
|||
|
||||
data = {'apikey': self.apikey,
|
||||
deviceid_key: self.deviceid,
|
||||
'title': subject.encode("utf-8"),
|
||||
'text': message.encode("utf-8")}
|
||||
|
||||
if self.incl_subject:
|
||||
data['title'] = subject.encode("utf-8")
|
||||
|
||||
response = requests.post('https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush',
|
||||
params=data)
|
||||
request_status = response.status_code
|
||||
|
@ -2649,7 +2717,160 @@ class JOIN(object):
|
|||
{'label': 'Your Devices IDs',
|
||||
'description': devices,
|
||||
'input_type': 'help'
|
||||
},
|
||||
{'label': 'Include Subject Line',
|
||||
'value': self.incl_subject,
|
||||
'name': 'join_incl_subject',
|
||||
'description': 'Include the subject line with the notifications.',
|
||||
'input_type': 'checkbox'
|
||||
}
|
||||
]
|
||||
|
||||
return config_option
|
||||
|
||||
class HIPCHAT(object):
|
||||
|
||||
def __init__(self):
|
||||
self.apiurl = plexpy.CONFIG.HIPCHAT_URL
|
||||
self.color = plexpy.CONFIG.HIPCHAT_COLOR
|
||||
self.emoticon = plexpy.CONFIG.HIPCHAT_EMOTICON
|
||||
self.incl_pmslink = plexpy.CONFIG.HIPCHAT_INCL_PMSLINK
|
||||
self.incl_poster = plexpy.CONFIG.HIPCHAT_INCL_POSTER
|
||||
self.incl_subject = plexpy.CONFIG.HIPCHAT_INCL_SUBJECT
|
||||
|
||||
def notify(self, message, subject, **kwargs):
|
||||
if not message or not subject:
|
||||
return
|
||||
|
||||
data = {'notify': 'false'}
|
||||
|
||||
text = message.encode('utf-8')
|
||||
|
||||
if self.incl_subject:
|
||||
data['from'] = subject.encode('utf-8')
|
||||
|
||||
if self.color:
|
||||
data['color'] = self.color
|
||||
|
||||
if self.incl_poster and 'metadata' in kwargs:
|
||||
pretty_metadata = PrettyMetadata(kwargs['metadata'])
|
||||
poster_url = pretty_metadata.get_poster_url()
|
||||
poster_link = pretty_metadata.get_poster_link()
|
||||
caption = pretty_metadata.get_caption()
|
||||
title = pretty_metadata.get_title()
|
||||
subtitle = pretty_metadata.get_subtitle()
|
||||
plex_url = pretty_metadata.get_plex_url()
|
||||
|
||||
card = {'title': title,
|
||||
'format': 'medium',
|
||||
'style': 'application',
|
||||
'id': uuid.uuid4().hex,
|
||||
'activity': {'html': text,
|
||||
'icon': {'url': poster_url}},
|
||||
'description': {'format': 'text',
|
||||
'value': subtitle},
|
||||
'thumbnail': {'url': poster_url}
|
||||
}
|
||||
|
||||
attributes = []
|
||||
if poster_link:
|
||||
card['url'] = poster_link
|
||||
attributes.append({'value': {'label': caption,
|
||||
'url': poster_link}})
|
||||
if self.incl_pmslink:
|
||||
attributes.append({'value': {'label': 'View on Plex Web',
|
||||
'url': plex_url}})
|
||||
if attributes:
|
||||
card['attributes'] = attributes
|
||||
|
||||
data['message'] = text
|
||||
data['card'] = card
|
||||
|
||||
else:
|
||||
if self.emoticon:
|
||||
text = self.emoticon + ' ' + text
|
||||
data['message'] = text
|
||||
data['message_format'] = 'text'
|
||||
|
||||
hiphost = urlparse(self.apiurl).hostname
|
||||
hipfullq = urlparse(self.apiurl).path + '?' + urlparse(self.apiurl).query
|
||||
|
||||
http_handler = HTTPSConnection(hiphost)
|
||||
http_handler.request("POST",
|
||||
hipfullq,
|
||||
headers={'Content-type': "application/json"},
|
||||
body=json.dumps(data))
|
||||
response = http_handler.getresponse()
|
||||
request_status = response.status
|
||||
|
||||
if request_status == 200 or request_status == 204:
|
||||
logger.info(u"PlexPy Notifiers :: Hipchat notification sent.")
|
||||
return True
|
||||
elif request_status >= 400 and request_status < 500:
|
||||
logger.warn(u"PlexPy Notifiers :: Hipchat notification failed: [%s] %s" % (request_status, response.reason))
|
||||
return False
|
||||
else:
|
||||
logger.warn(u"PlexPy Notifiers :: Hipchat notification failed.")
|
||||
return False
|
||||
|
||||
def test(self, apiurl, color, hipchat_emoticon, hipchat_incl_subject):
|
||||
|
||||
self.enabled = True
|
||||
self.apiurl = apiurl
|
||||
self.color = color
|
||||
self.emoticon = hipchat_emoticon
|
||||
self.incl_subject = hipchat_incl_subject
|
||||
|
||||
return self.notify('PlexPy', 'Test Message')
|
||||
|
||||
def return_config_options(self):
|
||||
config_option = [{'label': 'Hipchat Custom Integrations Full URL',
|
||||
'value': self.apiurl,
|
||||
'name': 'hipchat_url',
|
||||
'description': 'Your Hipchat BYO integration URL. You can get a key from'
|
||||
' <a href="' + helpers.anon_url('https://www.hipchat.com/addons/') + '" target="_blank">here</a>.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Hipchat Color',
|
||||
'value': self.color,
|
||||
'name': 'hipchat_color',
|
||||
'description': 'Background color for the message.',
|
||||
'input_type': 'select',
|
||||
'select_options': {'': '',
|
||||
'gray': 'gray',
|
||||
'green': 'green',
|
||||
'purple': 'purple',
|
||||
'random': 'random',
|
||||
'red': 'red',
|
||||
'yellow': 'yellow'
|
||||
}
|
||||
},
|
||||
{'label': 'Hipchat Emoticon',
|
||||
'value': self.emoticon,
|
||||
'name': 'hipchat_emoticon',
|
||||
'description': 'Include an emoticon tag at the beginning of text notifications (e.g. (taco)). Leave blank for none.'
|
||||
' Use a stock emoticon or create a custom emoticon'
|
||||
' <a href="' + helpers.anon_url('https://www.hipchat.com/emoticons/') + '" target="_blank">here</a>.',
|
||||
'input_type': 'text'
|
||||
},
|
||||
{'label': 'Include Poster',
|
||||
'value': self.incl_poster,
|
||||
'name': 'hipchat_incl_poster',
|
||||
'description': 'Include a poster in the notifications.<br>This will change the notification type to HTML and emoticons will no longer work.',
|
||||
'input_type': 'checkbox'
|
||||
},
|
||||
{'label': 'Include Link to Plex Web',
|
||||
'value': self.incl_pmslink,
|
||||
'name': 'hipchat_incl_pmslink',
|
||||
'description': 'Include a link to the media in Plex Web with the notifications.',
|
||||
'input_type': 'checkbox'
|
||||
},
|
||||
{'label': 'Include Subject Line',
|
||||
'value': self.incl_subject,
|
||||
'name': 'hipchat_incl_subject',
|
||||
'description': 'Include the subject line with the notifications.',
|
||||
'input_type': 'checkbox'
|
||||
}
|
||||
]
|
||||
|
||||
return config_option
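
For reference, when 'Include Poster' is enabled the HIPCHAT.notify() method above POSTs a JSON body shaped roughly like the following to the configured integration URL. This is a sketch only; the titles, URLs, and color here are placeholder values:

import json
import uuid

# Illustrative payload mirroring what HIPCHAT.notify() builds; all values are placeholders.
data = {'notify': 'false',
        'from': 'PlexPy (MyServer)',        # only set when 'Include Subject Line' is enabled
        'color': 'purple',                  # optional background color
        'message': 'User started playing The Red Woman.',
        'card': {'title': 'Game of Thrones - The Red Woman (S6 - E1)',
                 'format': 'medium',
                 'style': 'application',
                 'id': uuid.uuid4().hex,
                 'activity': {'html': 'User started playing The Red Woman.',
                              'icon': {'url': 'http://example.com/poster.png'}},
                 'description': {'format': 'text', 'value': 'Jon Snow is dead.'},
                 'thumbnail': {'url': 'http://example.com/poster.png'},
                 'url': 'https://thetvdb.com/?tab=series&id=121361',
                 'attributes': [{'value': {'label': 'View on TheTVDB',
                                           'url': 'https://thetvdb.com/?tab=series&id=121361'}}]}}

print(json.dumps(data, indent=4))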
|
149 plexpy/plextv.py
|
@ -17,6 +17,7 @@
|
|||
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import json
|
||||
from xml.dom import minidom
|
||||
|
||||
import plexpy
|
||||
|
@ -95,25 +96,31 @@ def get_real_pms_url():
|
|||
|
||||
fallback_url = 'http://' + plexpy.CONFIG.PMS_IP + ':' + str(plexpy.CONFIG.PMS_PORT)
|
||||
|
||||
if plexpy.CONFIG.PMS_SSL:
|
||||
result = PlexTV().get_server_urls(include_https=True)
|
||||
else:
|
||||
result = PlexTV().get_server_urls(include_https=False)
|
||||
plex_tv = PlexTV()
|
||||
result = plex_tv.get_server_urls(include_https=plexpy.CONFIG.PMS_SSL)
|
||||
plexpass = plex_tv.get_plexpass_status()
|
||||
|
||||
connections = []
|
||||
if result:
|
||||
plexpy.CONFIG.__setattr__('PMS_VERSION', result['version'])
|
||||
plexpy.CONFIG.__setattr__('PMS_PLATFORM', result['platform'])
|
||||
plexpy.CONFIG.__setattr__('PMS_PLEXPASS', plexpass)
|
||||
connections = result['connections']
|
||||
|
||||
# Only need to retrieve PMS_URL if using SSL
|
||||
if plexpy.CONFIG.PMS_SSL:
|
||||
if result:
|
||||
if connections:
|
||||
if plexpy.CONFIG.PMS_IS_REMOTE:
|
||||
# Get all remote connections
|
||||
connections = [c for c in result if c['local'] == '0' and 'plex.direct' in c['uri']]
|
||||
conns = [c for c in connections if c['local'] == '0' and 'plex.direct' in c['uri']]
|
||||
else:
|
||||
# Get all local connections
|
||||
connections = [c for c in result if c['local'] == '1' and 'plex.direct' in c['uri']]
|
||||
conns = [c for c in connections if c['local'] == '1' and 'plex.direct' in c['uri']]
|
||||
|
||||
if connections:
|
||||
if conns:
|
||||
# Get connection with matching address, otherwise return first connection
|
||||
conn = next((c for c in connections if c['address'] == plexpy.CONFIG.PMS_IP
|
||||
and c['port'] == str(plexpy.CONFIG.PMS_PORT)), connections[0])
|
||||
conn = next((c for c in conns if c['address'] == plexpy.CONFIG.PMS_IP
|
||||
and c['port'] == str(plexpy.CONFIG.PMS_PORT)), conns[0])
|
||||
plexpy.CONFIG.__setattr__('PMS_URL', conn['uri'])
|
||||
plexpy.CONFIG.write()
|
||||
logger.info(u"PlexPy PlexTV :: Server URL retrieved.")
|
||||
|
@ -273,6 +280,18 @@ class PlexTV(object):
|
|||
|
||||
return request
|
||||
|
||||
def get_plextv_downloads(self, plexpass=False, output_format=''):
|
||||
if plexpass:
|
||||
uri = '/api/downloads/1.json?channel=plexpass'
|
||||
else:
|
||||
uri = '/api/downloads/1.json'
|
||||
request = self.request_handler.make_request(uri=uri,
|
||||
proto=self.protocol,
|
||||
request_type='GET',
|
||||
output_format=output_format)
|
||||
|
||||
return request
|
||||
|
||||
def get_full_users_list(self):
|
||||
friends_list = self.get_plextv_friends()
|
||||
own_account = self.get_plextv_user_details()
|
||||
|
@ -454,7 +473,7 @@ class PlexTV(object):
|
|||
server_id = plexpy.CONFIG.PMS_IDENTIFIER
|
||||
else:
|
||||
logger.error(u"PlexPy PlexTV :: Unable to retrieve server identity.")
|
||||
return []
|
||||
return {}
|
||||
|
||||
plextv_resources = self.get_plextv_resources(include_https=include_https)
|
||||
|
||||
|
@ -462,22 +481,26 @@ class PlexTV(object):
|
|||
xml_parse = minidom.parseString(plextv_resources)
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s" % e)
|
||||
return []
|
||||
return {}
|
||||
except:
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls.")
|
||||
return []
|
||||
return {}
|
||||
|
||||
try:
|
||||
xml_head = xml_parse.getElementsByTagName('Device')
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s." % e)
|
||||
return []
|
||||
return {}
|
||||
|
||||
# Function to get all connections for a device
|
||||
def get_connections(device):
|
||||
conn = []
|
||||
connections = device.getElementsByTagName('Connection')
|
||||
|
||||
server = {"platform": helpers.get_xml_attr(device, 'platform'),
|
||||
"version": helpers.get_xml_attr(device, 'productVersion')
|
||||
}
|
||||
|
||||
for c in connections:
|
||||
server_details = {"protocol": helpers.get_xml_attr(c, 'protocol'),
|
||||
"address": helpers.get_xml_attr(c, 'address'),
|
||||
|
@ -487,18 +510,19 @@ class PlexTV(object):
|
|||
}
|
||||
conn.append(server_details)
|
||||
|
||||
return conn
|
||||
server['connections'] = conn
|
||||
return server
|
||||
|
||||
server_urls = []
|
||||
server = {}
|
||||
|
||||
# Try to match the device
|
||||
for a in xml_head:
|
||||
if helpers.get_xml_attr(a, 'clientIdentifier') == server_id:
|
||||
server_urls = get_connections(a)
|
||||
server = get_connections(a)
|
||||
break
|
||||
|
||||
# Else no device match found
|
||||
if not server_urls:
|
||||
if not server:
|
||||
# Try to match the PMS_IP and PMS_PORT
|
||||
for a in xml_head:
|
||||
if helpers.get_xml_attr(a, 'provides') == 'server':
|
||||
|
@ -511,16 +535,16 @@ class PlexTV(object):
|
|||
plexpy.CONFIG.PMS_IDENTIFIER = helpers.get_xml_attr(a, 'clientIdentifier')
|
||||
plexpy.CONFIG.write()
|
||||
|
||||
logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s." % \
|
||||
(server_id, plexpy.CONFIG.PMS_IDENTIFIER))
|
||||
logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s."
|
||||
% (server_id, plexpy.CONFIG.PMS_IDENTIFIER))
|
||||
|
||||
server_urls = get_connections(a)
|
||||
server = get_connections(a)
|
||||
break
|
||||
|
||||
if server_urls:
|
||||
if server.get('connections'):
|
||||
break
|
||||
|
||||
return server_urls
|
||||
return server
|
||||
|
||||
def get_server_times(self):
|
||||
servers = self.get_plextv_server_list(output_format='xml')
|
||||
|
@ -588,4 +612,81 @@ class PlexTV(object):
|
|||
}
|
||||
clean_servers.append(server)
|
||||
|
||||
return clean_servers
|
||||
return clean_servers
|
||||
|
||||
def get_plex_downloads(self):
|
||||
logger.debug(u"PlexPy PlexTV :: Plex update channel is %s." % plexpy.CONFIG.PMS_UPDATE_CHANNEL)
|
||||
plex_downloads = self.get_plextv_downloads(plexpass=(plexpy.CONFIG.PMS_UPDATE_CHANNEL == 'plexpass'))
|
||||
|
||||
try:
|
||||
available_downloads = json.loads(plex_downloads)
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to load JSON for get_plex_updates.")
|
||||
return {}
|
||||
|
||||
# Get the updates for the platform
|
||||
platform_downloads = available_downloads.get('computer').get(plexpy.CONFIG.PMS_PLATFORM) or \
|
||||
available_downloads.get('nas').get(plexpy.CONFIG.PMS_PLATFORM)
|
||||
|
||||
if not platform_downloads:
|
||||
logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
|
||||
% plexpy.CONFIG.PMS_PLATFORM)
|
||||
return {}
|
||||
|
||||
v_old = plexpy.CONFIG.PMS_VERSION.split('-')[0].split('.')
|
||||
v_new = platform_downloads.get('version', '').split('-')[0].split('.')
|
||||
|
||||
if len(v_old) < 4:
|
||||
logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
|
||||
% plexpy.CONFIG.PMS_VERSION)
|
||||
return {}
|
||||
if len(v_new) < 4:
|
||||
logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
|
||||
% platform_downloads.get('version'))
|
||||
return {}
|
||||
|
||||
# Compare versions
|
||||
if v_new[0] > v_old[0] or \
|
||||
v_new[0] == v_old[0] and v_new[1] > v_old[1] or \
|
||||
v_new[0] == v_old[0] and v_new[1] == v_old[1] and v_new[2] > v_old[2] or \
|
||||
v_new[0] == v_old[0] and v_new[1] == v_old[1] and v_new[2] == v_old[2] and v_new[3] > v_old[3]:
|
||||
update_available = True
|
||||
else:
|
||||
update_available = False
|
||||
|
||||
# Get proper download
|
||||
releases = platform_downloads.get('releases', [{}])
|
||||
release = next((r for r in releases if r['build'] == plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD), releases[0])
|
||||
|
||||
download_info = {'update_available': update_available,
|
||||
'platform': platform_downloads.get('name'),
|
||||
'release_date': platform_downloads.get('release_date'),
|
||||
'version': platform_downloads.get('version'),
|
||||
'requirements': platform_downloads.get('requirements'),
|
||||
'extra_info': platform_downloads.get('extra_info'),
|
||||
'changelog_added': platform_downloads.get('items_added'),
|
||||
'changelog_fixed': platform_downloads.get('items_fixed'),
|
||||
'label': release.get('label'),
|
||||
'distro': release.get('distro'),
|
||||
'distro_build': release.get('build'),
|
||||
'download_url': release.get('url'),
|
||||
}
|
||||
|
||||
return download_info
|
||||
|
||||
def get_plexpass_status(self):
|
||||
account_data = self.get_plextv_user_details(output_format='xml')
|
||||
|
||||
try:
|
||||
subscription = account_data.getElementsByTagName('subscription')
|
||||
except Exception as e:
|
||||
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_plexpass_status: %s." % e)
|
||||
return False
|
||||
|
||||
if subscription and helpers.get_xml_attr(subscription[0], 'active') == '1':
|
||||
return True
|
||||
else:
|
||||
logger.debug(u"PlexPy PlexTV :: Plex Pass subscription not found.")
|
||||
plexpy.CONFIG.__setattr__('PMS_PLEXPASS', 0)
|
||||
plexpy.CONFIG.write()
|
||||
return False
|
|
@ -181,8 +181,8 @@ class Users(object):
|
|||
'session_history_media_info.transcode_decision',
|
||||
'session_history.user',
|
||||
'session_history.user_id as custom_user_id',
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE \
|
||||
users.friendly_name END) AS friendly_name'
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
|
||||
THEN users.username ELSE users.friendly_name END) AS friendly_name'
|
||||
]
|
||||
|
||||
try:
|
||||
|
@ -717,8 +717,8 @@ class Users(object):
|
|||
'user_login.host',
|
||||
'user_login.user_agent',
|
||||
'user_login.timestamp',
|
||||
'(CASE WHEN users.friendly_name IS NULL THEN user_login.user ELSE users.friendly_name END) \
|
||||
AS friendly_name'
|
||||
'(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
|
||||
THEN users.username ELSE users.friendly_name END) AS friendly_name'
|
||||
]
|
||||
|
||||
try:
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
PLEXPY_VERSION = "master"
|
||||
PLEXPY_RELEASE_VERSION = "1.4.6"
|
||||
PLEXPY_RELEASE_VERSION = "1.4.7"
|
||||
|
|
|
@ -269,7 +269,7 @@ class WebInterface(object):
|
|||
else:
|
||||
if s['video_decision'] == 'transcode' or s['audio_decision'] == 'transcode':
|
||||
data['transcode'] += 1
|
||||
elif s['video_decision'] == 'direct copy' or s['audio_decision'] == 'copy play':
|
||||
elif s['video_decision'] == 'copy' or s['audio_decision'] == 'copy':
|
||||
data['direct_stream'] += 1
|
||||
else:
|
||||
data['direct_play'] += 1
|
||||
|
@ -2491,7 +2491,7 @@ class WebInterface(object):
|
|||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def settings(self):
|
||||
def settings(self, **kwargs):
|
||||
interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
|
||||
interface_list = [name for name in os.listdir(interface_dir) if
|
||||
os.path.isdir(os.path.join(interface_dir, name))]
|
||||
|
@ -2569,6 +2569,8 @@ class WebInterface(object):
|
|||
"notify_recently_added": checked(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED),
|
||||
"notify_recently_added_grandparent": checked(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT),
|
||||
"notify_recently_added_delay": plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY,
|
||||
"notify_concurrent_by_ip": plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP,
|
||||
"notify_concurrent_threshold": plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD,
|
||||
"notify_watched_percent": plexpy.CONFIG.NOTIFY_WATCHED_PERCENT,
|
||||
"notify_on_start_subject_text": plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT,
|
||||
"notify_on_start_body_text": plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT,
|
||||
|
@ -2594,6 +2596,10 @@ class WebInterface(object):
|
|||
"notify_on_intup_body_text": plexpy.CONFIG.NOTIFY_ON_INTUP_BODY_TEXT,
|
||||
"notify_on_pmsupdate_subject_text": plexpy.CONFIG.NOTIFY_ON_PMSUPDATE_SUBJECT_TEXT,
|
||||
"notify_on_pmsupdate_body_text": plexpy.CONFIG.NOTIFY_ON_PMSUPDATE_BODY_TEXT,
|
||||
"notify_on_concurrent_subject_text": plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT,
|
||||
"notify_on_concurrent_body_text": plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT,
|
||||
"notify_on_newdevice_subject_text": plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT,
|
||||
"notify_on_newdevice_body_text": plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT,
|
||||
"notify_scripts_args_text": plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT,
|
||||
"home_sections": json.dumps(plexpy.CONFIG.HOME_SECTIONS),
|
||||
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
|
||||
|
@ -2606,10 +2612,11 @@ class WebInterface(object):
|
|||
"group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES),
|
||||
"git_token": plexpy.CONFIG.GIT_TOKEN,
|
||||
"imgur_client_id": plexpy.CONFIG.IMGUR_CLIENT_ID,
|
||||
"cache_images": checked(plexpy.CONFIG.CACHE_IMAGES)
|
||||
"cache_images": checked(plexpy.CONFIG.CACHE_IMAGES),
|
||||
"pms_version": plexpy.CONFIG.PMS_VERSION
|
||||
}
|
||||
|
||||
return serve_template(templatename="settings.html", title="Settings", config=config)
|
||||
return serve_template(templatename="settings.html", title="Settings", config=config, kwargs=kwargs)
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
|
@ -2764,11 +2771,27 @@ class WebInterface(object):
|
|||
else:
|
||||
return {'result': 'error', 'message': 'Config backup failed.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def get_configuration_table(self, **kwargs):
|
||||
return serve_template(templatename="configuration_table.html")
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def get_scheduler_table(self, **kwargs):
|
||||
return serve_template(templatename="scheduler_table.html")
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
def get_server_update_params(self):
|
||||
plex_tv = plextv.PlexTV()
|
||||
plexpass = plex_tv.get_plexpass_status()
|
||||
return {'plexpass': plexpass,
|
||||
'pms_platform': plexpy.CONFIG.PMS_PLATFORM,
|
||||
'pms_update_channel': plexpy.CONFIG.PMS_UPDATE_CHANNEL,
|
||||
'pms_update_distro_build': plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
|
@ -2782,6 +2805,34 @@ class WebInterface(object):
|
|||
else:
|
||||
return {'result': 'error', 'message': 'Database backup failed.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def install_geoip_db(self):
|
||||
""" Downloads and installs the GeoLite2 database """
|
||||
|
||||
result = helpers.install_geoip_db()
|
||||
|
||||
if result:
|
||||
return {'result': 'success', 'message': 'GeoLite2 database installed successfully.'}
|
||||
else:
|
||||
return {'result': 'error', 'message': 'GeoLite2 database install failed.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def uninstall_geoip_db(self):
|
||||
""" Uninstalls the GeoLite2 database """
|
||||
|
||||
result = helpers.uninstall_geoip_db()
|
||||
|
||||
if result:
|
||||
return {'result': 'success', 'message': 'GeoLite2 database uninstalled successfully.'}
|
||||
else:
|
||||
return {'result': 'error', 'message': 'GeoLite2 database uninstall failed.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@requireAuth(member_of("admin"))
|
||||
def get_notification_agent_config(self, agent_id, **kwargs):
|
||||
|
@ -2833,6 +2884,7 @@ class WebInterface(object):
|
|||
10 # Email
|
||||
16 # Facebook
|
||||
0 # Growl
|
||||
19 # Hipchat
|
||||
12 # IFTTT
|
||||
18 # Join
|
||||
4 # NotifyMyAndroid
|
||||
|
@ -3217,7 +3269,9 @@ class WebInterface(object):
|
|||
logger.error('No image input received.')
|
||||
return
|
||||
|
||||
refresh = True if refresh == 'true' else False
|
||||
if refresh:
|
||||
mo = member_of('admin')
|
||||
refresh = True if mo() else False
|
||||
|
||||
if rating_key and not img:
|
||||
img = '/library/metadata/%s/thumb/1337' % rating_key
|
||||
|
@ -4202,7 +4256,7 @@ class WebInterface(object):
|
|||
'Can you hurry up. My horse is getting tired.',
|
||||
'What killed the dinosaurs? The Ice Age!',
|
||||
'That\'s for sleeping with my wife!',
|
||||
'Remember when I said I’d kill you last... I lied!',
|
||||
'Remember when I said I\'d kill you last... I lied!',
|
||||
'You want to be a farmer? Here\'s a couple of acres',
|
||||
'Now, this is the plan. Get your ass to Mars.',
|
||||
'I just had a terrible thought... What if this is a dream?'
|
||||
|
@@ -4229,3 +4283,39 @@ class WebInterface(object):
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_update_staus()
        return result

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @requireAuth()
    @addtoapi()
    def get_geoip_lookup(self, ip_address='', **kwargs):
        """ Get the geolocation info for an IP address. The GeoLite2 database must be installed.

            ```
            Required parameters:
                ip_address

            Optional parameters:
                None

            Returns:
                json:
                    {"continent": "North America",
                     "country": "United States",
                     "region": "California",
                     "city": "Mountain View",
                     "postal_code": "94035",
                     "timezone": "America/Los_Angeles",
                     "latitude": 37.386,
                     "longitude": -122.0838,
                     "accuracy": 1000
                     }
                json:
                    {"error": "The address 127.0.0.1 is not in the database."
                     }
            ```
        """
        geo_info = helpers.geoip_lookup(ip_address)
        if isinstance(geo_info, basestring):
            return {'error': geo_info}
        return geo_info