mirror of
https://github.com/Tautulli/Tautulli.git
synced 2025-07-06 21:21:15 -07:00
Merge branch 'nightly' into python3
# Conflicts: # plexpy/activity_pinger.py # plexpy/activity_processor.py # plexpy/helpers.py # plexpy/notifiers.py # plexpy/version.py # plexpy/webserve.py
This commit is contained in:
commit
d8f223327e
47 changed files with 566 additions and 3201 deletions
15
API.md
15
API.md
|
@ -672,7 +672,7 @@ Returns:
|
||||||
|
|
||||||
|
|
||||||
### get_geoip_lookup
|
### get_geoip_lookup
|
||||||
Get the geolocation info for an IP address. The GeoLite2 database must be installed.
|
Get the geolocation info for an IP address.
|
||||||
|
|
||||||
```
|
```
|
||||||
Required parameters:
|
Required parameters:
|
||||||
|
@ -683,7 +683,7 @@ Optional parameters:
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
json:
|
json:
|
||||||
{"continent": "North America",
|
{"code": 'US",
|
||||||
"country": "United States",
|
"country": "United States",
|
||||||
"region": "California",
|
"region": "California",
|
||||||
"city": "Mountain View",
|
"city": "Mountain View",
|
||||||
|
@ -693,9 +693,6 @@ Returns:
|
||||||
"longitude": -122.0838,
|
"longitude": -122.0838,
|
||||||
"accuracy": 1000
|
"accuracy": 1000
|
||||||
}
|
}
|
||||||
json:
|
|
||||||
{"error": "The address 127.0.0.1 is not in the database."
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -2574,10 +2571,6 @@ Returns:
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
### install_geoip_db
|
|
||||||
Downloads and installs the GeoLite2 database
|
|
||||||
|
|
||||||
|
|
||||||
### notify
|
### notify
|
||||||
Send a notification using Tautulli.
|
Send a notification using Tautulli.
|
||||||
|
|
||||||
|
@ -2854,10 +2847,6 @@ Returns:
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
### uninstall_geoip_db
|
|
||||||
Uninstalls the GeoLite2 database
|
|
||||||
|
|
||||||
|
|
||||||
### update
|
### update
|
||||||
Update Tautulli.
|
Update Tautulli.
|
||||||
|
|
||||||
|
|
22
CHANGELOG.md
22
CHANGELOG.md
|
@ -1,5 +1,23 @@
|
||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## v2.2.3-beta (2020-04-27)
|
||||||
|
|
||||||
|
* Notifications:
|
||||||
|
* New: Added Plex Android / iOS App notification agent.
|
||||||
|
* New: Added bandwidth notification parameters.
|
||||||
|
* New: Added user thumb to notification parameters.
|
||||||
|
* New: Added initial stream notification parameter and threshold setting to determine if a stream is the first stream of a continuous streaming session.
|
||||||
|
* New: Added Plex remote access notification parameters.
|
||||||
|
* Fix: The rating key notification parameter was being overwritten when 3rd party lookup was enabled.
|
||||||
|
* Fix: Missing artist value for Musicbrainz lookup in certain situations.
|
||||||
|
* UI:
|
||||||
|
* Fix: History table was not being refreshed after deleting entries.
|
||||||
|
* Other:
|
||||||
|
* Fix: Auto-updater was not scheduled when enabling the setting unless Tautulli was restarted.
|
||||||
|
* Change: Remove the unnecessary optional Plex logs volume from the Docker image.
|
||||||
|
* Change: Use Plex.tv for GeoIP lookup instead of requiring the MaxMind GeoLite2 database.
|
||||||
|
|
||||||
|
|
||||||
## v2.2.2-beta (2020-04-12)
|
## v2.2.2-beta (2020-04-12)
|
||||||
|
|
||||||
* Notifications:
|
* Notifications:
|
||||||
|
@ -14,14 +32,14 @@
|
||||||
* Fix: XBMC platform icon not being redirected to the Kodi platform icon.
|
* Fix: XBMC platform icon not being redirected to the Kodi platform icon.
|
||||||
* Change: Improved deleting libraries so libraries with the same section ID are not also deleted.
|
* Change: Improved deleting libraries so libraries with the same section ID are not also deleted.
|
||||||
* API:
|
* API:
|
||||||
* Fix: Returning XML for the API failing due to unicode characters.
|
* Fix: Returning XML from the API failing due to unicode characters.
|
||||||
* Fix: Grouping parameter for various API commands not falling back to default setting.
|
* Fix: Grouping parameter for various API commands not falling back to default setting.
|
||||||
* New: Added time_queries parameter to get_library_watch_time_stats and get_user_watch_time_stats API command. (Thanks @KaasKop97)
|
* New: Added time_queries parameter to get_library_watch_time_stats and get_user_watch_time_stats API command. (Thanks @KaasKop97)
|
||||||
* New: Added an "is_active" return value to the get_user, get_users, get_library, and get_libraries API commands which indicates if the user or library is on the Plex server.
|
* New: Added an "is_active" return value to the get_user, get_users, get_library, and get_libraries API commands which indicates if the user or library is on the Plex server.
|
||||||
* New: Added delete_history API command.
|
* New: Added delete_history API command.
|
||||||
* Change: Added optional parameter for row_ids for delete_library, delete_user, delete_all_library_history, and delete_all_user_history API commands.
|
* Change: Added optional parameter for row_ids for delete_library, delete_user, delete_all_library_history, and delete_all_user_history API commands.
|
||||||
* Mobile App:
|
* Mobile App:
|
||||||
* Fix: Temporary device token not being invalidated after cancelling device registration.
|
* Fix: Temporary device token was not being invalidated after cancelling device registration.
|
||||||
* Other:
|
* Other:
|
||||||
* Fix: Update failing on CentOS due to an older git version.
|
* Fix: Update failing on CentOS due to an older git version.
|
||||||
* Fix: Manifest file for creating a web app had incorrect info.
|
* Fix: Manifest file for creating a web app had incorrect info.
|
||||||
|
|
|
@ -18,6 +18,6 @@ COPY . /app
|
||||||
|
|
||||||
CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
|
CMD [ "python", "Tautulli.py", "--datadir", "/config" ]
|
||||||
|
|
||||||
VOLUME /config /plex_logs
|
VOLUME /config
|
||||||
EXPOSE 8181
|
EXPOSE 8181
|
||||||
HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1
|
HEALTHCHECK --start-period=90s CMD curl -ILfSs http://localhost:8181/status > /dev/null || curl -ILfkSs https://localhost:8181/status > /dev/null || exit 1
|
||||||
|
|
|
@ -35,8 +35,8 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
|
||||||
|
|
||||||
| Status | Branch: `master` | Branch: `beta` | Branch: `nightly` |
|
| Status | Branch: `master` | Branch: `beta` | Branch: `nightly` |
|
||||||
| --- | --- | --- | --- |
|
| --- | --- | --- | --- |
|
||||||
| Release | [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/releases/latest) | [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/commits/beta) | [](https://github.com/Tautulli/Tautulli/commits/nightly) <br> [](https://github.com/Tautulli/Tautulli/commits/nightly) |
|
| Release | [](https://github.com/Tautulli/Tautulli/releases/latest) <br> [](https://github.com/Tautulli/Tautulli/releases/latest) | [](https://github.com/Tautulli/Tautulli/releases) <br> [](https://github.com/Tautulli/Tautulli/commits/beta) | [](https://github.com/Tautulli/Tautulli/commits/nightly) <br> [](https://github.com/Tautulli/Tautulli/commits/nightly) |
|
||||||
| Docker | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Amaster) | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Abeta) | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Anightly) |
|
| Docker | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Amaster) | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Abeta) | [](https://hub.docker.com/r/tautulli/tautulli) <br> [](https://github.com/Tautulli/Tautulli/actions?query=workflow%3A"Publish+Docker"+branch%3Anightly) |
|
||||||
|
|
||||||
[](https://github.com/Tautulli/Tautulli-Wiki/wiki)
|
[](https://github.com/Tautulli/Tautulli-Wiki/wiki)
|
||||||
[](https://tautulli.com/discord)
|
[](https://tautulli.com/discord)
|
||||||
|
|
|
@ -193,9 +193,9 @@
|
||||||
success: function (data) {
|
success: function (data) {
|
||||||
var msg = "History deleted";
|
var msg = "History deleted";
|
||||||
showMsg(msg, false, true, 2000);
|
showMsg(msg, false, true, 2000);
|
||||||
|
history_table.draw();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
history_table.draw();
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -729,9 +729,9 @@ DOCUMENTATION :: END
|
||||||
success: function (data) {
|
success: function (data) {
|
||||||
var msg = "History deleted";
|
var msg = "History deleted";
|
||||||
showMsg(msg, false, true, 2000);
|
showMsg(msg, false, true, 2000);
|
||||||
|
history_table.draw();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
history_table.draw();
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -24,7 +24,6 @@
|
||||||
<div id="ip_error" class="col-sm-12 text-muted"></div>
|
<div id="ip_error" class="col-sm-12 text-muted"></div>
|
||||||
<div class="col-sm-6">
|
<div class="col-sm-6">
|
||||||
<ul class="list-unstyled">
|
<ul class="list-unstyled">
|
||||||
<li>Continent: <strong><span id="continent"></span></strong></li>
|
|
||||||
<li>Country: <strong><span id="country"></span></strong></li>
|
<li>Country: <strong><span id="country"></span></strong></li>
|
||||||
<li>Region: <strong><span id="region"></span></strong></li>
|
<li>Region: <strong><span id="region"></span></strong></li>
|
||||||
<li>City: <strong><span id="city"></span></strong></li>
|
<li>City: <strong><span id="city"></span></strong></li>
|
||||||
|
@ -36,7 +35,6 @@
|
||||||
<li>Timezone: <strong><span id="timezone"></span></strong></li>
|
<li>Timezone: <strong><span id="timezone"></span></strong></li>
|
||||||
<li>Latitude: <strong><span id="latitude"></span></strong></li>
|
<li>Latitude: <strong><span id="latitude"></span></strong></li>
|
||||||
<li>Longitude: <strong><span id="longitude"></span></strong></li>
|
<li>Longitude: <strong><span id="longitude"></span></strong></li>
|
||||||
<li>Accuracy Radius: <strong><span id="accuracy"></span></strong></li>
|
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
<div class="col-sm-12">
|
<div class="col-sm-12">
|
||||||
|
@ -61,8 +59,6 @@
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="modal-footer">
|
<div class="modal-footer">
|
||||||
<% from plexpy.helpers import anon_url %>
|
|
||||||
<span class="text-muted">GeoLite2 data created by <a href="${anon_url('http://www.maxmind.com')}" target="_blank">MaxMind</a>.</span>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
@ -82,11 +78,11 @@
|
||||||
error: function () {
|
error: function () {
|
||||||
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> Internal request failed.').show();
|
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> Internal request failed.').show();
|
||||||
},
|
},
|
||||||
success: function (data) {
|
success: function (result) {
|
||||||
if ('error' in data) {
|
if (result.results === 'error') {
|
||||||
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> ' + data.error).show();
|
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> ' + result.message).show();
|
||||||
} else {
|
} else {
|
||||||
$('#continent').html(data.continent);
|
var data = result.data;
|
||||||
$('#country').html(data.country);
|
$('#country').html(data.country);
|
||||||
$('#region').html(data.region);
|
$('#region').html(data.region);
|
||||||
$('#city').html(data.city);
|
$('#city').html(data.city);
|
||||||
|
@ -94,7 +90,6 @@
|
||||||
$('#timezone').html(data.timezone);
|
$('#timezone').html(data.timezone);
|
||||||
$('#latitude').html(data.latitude);
|
$('#latitude').html(data.latitude);
|
||||||
$('#longitude').html(data.longitude);
|
$('#longitude').html(data.longitude);
|
||||||
$('#accuracy').html(data.accuracy + ' km');
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
|
@ -493,9 +493,9 @@ DOCUMENTATION :: END
|
||||||
success: function (data) {
|
success: function (data) {
|
||||||
var msg = "History deleted";
|
var msg = "History deleted";
|
||||||
showMsg(msg, false, true, 2000);
|
showMsg(msg, false, true, 2000);
|
||||||
|
history_table.draw();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
history_table.draw();
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
% if notifier:
|
% if notifier:
|
||||||
<%!
|
<%
|
||||||
import json
|
import json
|
||||||
from plexpy import notifiers, users
|
from plexpy import notifiers, users
|
||||||
from plexpy.helpers import checked
|
from plexpy.helpers import checked
|
||||||
available_notification_actions = notifiers.available_notification_actions()
|
available_notification_actions = notifiers.available_notification_actions(agent_id=notifier['agent_id'])
|
||||||
|
|
||||||
user_emails = [{'user': u['friendly_name'] or u['username'], 'email': u['email']} for u in users.Users().get_users() if u['email']]
|
user_emails = [{'user': u['friendly_name'] or u['username'], 'email': u['email']} for u in users.Users().get_users() if u['email']]
|
||||||
sorted(user_emails, key=lambda u: u['user'])
|
sorted(user_emails, key=lambda u: u['user'])
|
||||||
|
@ -25,7 +25,7 @@
|
||||||
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Arguments</a></li>
|
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Arguments</a></li>
|
||||||
% elif notifier['agent_name'] == 'webhook':
|
% elif notifier['agent_name'] == 'webhook':
|
||||||
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Data</a></li>
|
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Data</a></li>
|
||||||
% else:
|
% elif notifier['agent_name'] != 'plexmobileapp':
|
||||||
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Text</a></li>
|
<li role="presentation"><a href="#tabs-notify_text" aria-controls="tabs-notify_text" role="tab" data-toggle="tab">Text</a></li>
|
||||||
% endif
|
% endif
|
||||||
<li role="presentation"><a href="#tabs-test_notifications" aria-controls="tabs-test_notifications" role="tab" data-toggle="tab">Test Notifications</a></li>
|
<li role="presentation"><a href="#tabs-test_notifications" aria-controls="tabs-test_notifications" role="tab" data-toggle="tab">Test Notifications</a></li>
|
||||||
|
@ -684,6 +684,15 @@
|
||||||
pushoverPriority();
|
pushoverPriority();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
% elif notifier['agent_name'] == 'plexmobileapp':
|
||||||
|
var $plexmobileapp_user_ids = $('#plexmobileapp_user_ids').selectize({
|
||||||
|
plugins: ['remove_button'],
|
||||||
|
maxItems: null,
|
||||||
|
create: true
|
||||||
|
});
|
||||||
|
var plexmobileapp_user_ids = $plexmobileapp_user_ids[0].selectize;
|
||||||
|
plexmobileapp_user_ids.setValue(${json.dumps(next((c['value'] for c in notifier['config_options'] if c['name'] == 'plexmobileapp_user_ids'), [])) | n});
|
||||||
|
|
||||||
% endif
|
% endif
|
||||||
|
|
||||||
function validateLogic() {
|
function validateLogic() {
|
||||||
|
|
|
@ -941,7 +941,7 @@
|
||||||
</div>
|
</div>
|
||||||
<div id="buffer_wait_error" class="alert alert-danger settings-alert" role="alert"></div>
|
<div id="buffer_wait_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||||
</div>
|
</div>
|
||||||
<p class="help-block">The value (in seconds) Tautulli should wait before triggering the next buffer warning. 0 to always trigger.</p>
|
<p class="help-block">The value (in seconds) Tautulli should wait before triggering the next buffer warning. Set to 0 to always trigger.</p>
|
||||||
</div>
|
</div>
|
||||||
<div class="checkbox advanced-setting">
|
<div class="checkbox advanced-setting">
|
||||||
<label>
|
<label>
|
||||||
|
@ -965,6 +965,20 @@
|
||||||
</div>
|
</div>
|
||||||
<p class="help-block">The number of concurrent streams by a single user for Tautulli to trigger a notification. Minimum 2.</p>
|
<p class="help-block">The number of concurrent streams by a single user for Tautulli to trigger a notification. Minimum 2.</p>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="form-group advanced-setting">
|
||||||
|
<label for="notify_concurrent_threshold">Continued Session Threshold</label>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-md-2">
|
||||||
|
<input type="text" class="form-control" data-parsley-type="integer" id="notify_continued_session_threshold" name="notify_continued_session_threshold" value="${config['notify_continued_session_threshold']}" data-parsley-min="0" data-parsley-trigger="change" data-parsley-errors-container="#notify_continued_session_threshold_error" required>
|
||||||
|
</div>
|
||||||
|
<div id="notify_continued_session_threshold_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||||
|
</div>
|
||||||
|
<p class="help-block">
|
||||||
|
The number of seconds between stopping and starting a new stream to be considered as a continued session. Set to 0 to consider all streams as new sessions.
|
||||||
|
<br>
|
||||||
|
Note: The threshold is only used by the "Initial Stream" notification parameter to determine if a stream is the first stream of a continuous streaming session.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div class="padded-header">
|
<div class="padded-header">
|
||||||
<h3>Recently Added Notifications</h3>
|
<h3>Recently Added Notifications</h3>
|
||||||
|
@ -1254,7 +1268,7 @@
|
||||||
<p class="help-block">Enable to lookup links to MusicBrainz for music when available.</p>
|
<p class="help-block">Enable to lookup links to MusicBrainz for music when available.</p>
|
||||||
</div>
|
</div>
|
||||||
<div class="form-group">
|
<div class="form-group">
|
||||||
<label for="maxmind_license_key">Delete Lookup Info</label>
|
<label for="delete_lookup_info">Delete Lookup Info</label>
|
||||||
<p class="help-block">Delete all cached metadata lookup info in Tautulli.</p>
|
<p class="help-block">Delete all cached metadata lookup info in Tautulli.</p>
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col-md-9">
|
<div class="col-md-9">
|
||||||
|
@ -1267,54 +1281,6 @@
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="padded-header">
|
|
||||||
<h3>Geolocation Database</h3>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<p class="help-block">The GeoLite2 database is used to geolocate IP addresses.</p>
|
|
||||||
<p class="help-block">
|
|
||||||
Please see the <a target='_blank' href='${anon_url('https://github.com/%s/%s-Wiki/wiki/3rd-Party-APIs-Guide' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_REPO))}'>3rd Party APIs Guide</a> for instructions on setting up MaxMind.<br>
|
|
||||||
</p>
|
|
||||||
<div class="form-group">
|
|
||||||
<label for="maxmind_license_key">MaxMind License Key</label>
|
|
||||||
<div class="row">
|
|
||||||
<div class="col-md-6">
|
|
||||||
<input type="text" class="form-control" id="maxmind_license_key" name="maxmind_license_key" value="${config['maxmind_license_key']}" data-parsley-trigger="change">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<p class="help-block">
|
|
||||||
Enter your MaxMind License Key to install the GeoLite2 database.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<div class="form-group">
|
|
||||||
<label for="geoip_db">GeoLite2 Database File</label> ${docker_msg | n}
|
|
||||||
<div class="row">
|
|
||||||
<div class="col-md-9">
|
|
||||||
<div class="input-group">
|
|
||||||
<input type="text" class="form-control" id="geoip_db" name="geoip_db" value="${config['geoip_db']}" ${docker_setting} data-parsley-trigger="change" data-parsley-pattern=".+\.mmdb$" data-parsley-errors-container="#geoip_db_error" data-parsley-error-message="Must end with '.mmdb'">
|
|
||||||
<span class="input-group-btn">
|
|
||||||
<button class="btn btn-form" type="button" id="install_geoip_db">${'Update' if config["geoip_db_installed"] else 'Install'}</button>
|
|
||||||
<button class="btn btn-form" type="button" id="uninstall_geoip_db" ${'disabled' if not config['geoip_db_installed'] else ''}>Uninstall</button>
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="geoip_db_error" class="alert alert-danger settings-alert" role="alert"></div>
|
|
||||||
</div>
|
|
||||||
<p class="help-block">
|
|
||||||
Leave blank to install in the default location. GeoLite2 database last updated <strong><span id="geoip_db_updated">never</span></strong>.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<div class="form-group advanced-setting">
|
|
||||||
<label for="geoip_db_update_days">GeoLite2 Database Update Interval</label>
|
|
||||||
<div class="row">
|
|
||||||
<div class="col-md-2">
|
|
||||||
<input type="text" class="form-control" data-parsley-type="integer" id="geoip_db_update_days" name="geoip_db_update_days" value="${config['geoip_db_update_days']}" size="5" data-parsley-range="[7, 30]" data-parsley-trigger="change" data-parsley-errors-container="#geoip_db_update_days_error" required>
|
|
||||||
</div>
|
|
||||||
<div id="geoip_db_update_days_error" class="alert alert-danger settings-alert" role="alert"></div>
|
|
||||||
</div>
|
|
||||||
<p class="help-block">The interval (in days) Tautulli will automatically update the GeoLite2 database. Minimum 7, maximum 30, default 30.</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
|
<p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
@ -2928,56 +2894,6 @@ $(document).ready(function() {
|
||||||
$('#resources-xml').on('tripleclick', function () {
|
$('#resources-xml').on('tripleclick', function () {
|
||||||
openPlexXML('/api/resources', true, {includeHttps: 1});
|
openPlexXML('/api/resources', true, {includeHttps: 1});
|
||||||
});
|
});
|
||||||
|
|
||||||
if ("${kwargs.get('install_geoip')}" === 'true') {
|
|
||||||
gotoSetting('3rd_party_apis', 'geoip_db')
|
|
||||||
}
|
|
||||||
|
|
||||||
if ("${config['geoip_db_installed']}" > "0") {
|
|
||||||
$("#geoip_db_updated").text(moment("${config['geoip_db_installed']}", "X").fromNow());
|
|
||||||
}
|
|
||||||
|
|
||||||
$("#install_geoip_db").click(function () {
|
|
||||||
var maxmind_license_key = $("#maxmind_license_key");
|
|
||||||
maxmind_license_key.val($.trim(maxmind_license_key.val()));
|
|
||||||
if (maxmind_license_key.val() === "") {
|
|
||||||
maxmind_license_key.focus();
|
|
||||||
showMsg('<i class="fa fa-exclamation-circle"></i> Maxmind License Key is required.', false, true, 5000, true);
|
|
||||||
return false;
|
|
||||||
} else if (!(saveSettings())) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
var msg = 'Are you sure you want to install the GeoLite2 database?<br /><br />' +
|
|
||||||
'The database is used to lookup IP address geolocation info.<br />' +
|
|
||||||
'The database will be downloaded from <a href="${anon_url("https://dev.maxmind.com/geoip/geoip2/geolite2/")}" target="_blank">MaxMind</a>, <br />' +
|
|
||||||
'and requires <strong>100MB</strong> of free space to install.<br />';
|
|
||||||
var url = 'install_geoip_db';
|
|
||||||
if ($(this).text() === 'Update') {
|
|
||||||
url += '?update=true';
|
|
||||||
}
|
|
||||||
confirmAjaxCall(url, msg, null, 'Installing GeoLite2 database.', function (result) {
|
|
||||||
if (result.result === "success") {
|
|
||||||
$('#install_geoip_db').text('Update');
|
|
||||||
$('#uninstall_geoip_db').prop('disabled', false);
|
|
||||||
$('#geoip_db_updated').text(moment(result.updated, "X").fromNow());
|
|
||||||
}
|
|
||||||
getSchedulerTable();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
$("#uninstall_geoip_db").click(function () {
|
|
||||||
var msg = 'Are you sure you want to uninstall the GeoLite2 database?<br /><br />' +
|
|
||||||
'You will not be able to lookup IP address geolocation info.';
|
|
||||||
var url = 'uninstall_geoip_db';
|
|
||||||
confirmAjaxCall(url, msg, null, 'Uninstalling GeoLite2 database.', function (result) {
|
|
||||||
if (result.result === "success") {
|
|
||||||
$('#install_geoip_db').text('Install');
|
|
||||||
$('#uninstall_geoip_db').prop('disabled', true);
|
|
||||||
$('#geoip_db_updated').text('never');
|
|
||||||
}
|
|
||||||
getSchedulerTable();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
</%def>
|
</%def>
|
||||||
|
|
|
@ -582,9 +582,9 @@ DOCUMENTATION :: END
|
||||||
success: function (data) {
|
success: function (data) {
|
||||||
var msg = "History deleted";
|
var msg = "History deleted";
|
||||||
showMsg(msg, false, true, 2000);
|
showMsg(msg, false, true, 2000);
|
||||||
|
history_table.draw();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
history_table.draw();
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
# pylint:disable=C0111
|
|
||||||
|
|
||||||
__title__ = 'geoip2'
|
|
||||||
__version__ = '2.4.0'
|
|
||||||
__author__ = 'Gregory Oschwald'
|
|
||||||
__license__ = 'Apache License, Version 2.0'
|
|
||||||
__copyright__ = 'Copyright (c) 2013-2016 Maxmind, Inc.'
|
|
|
@ -1,17 +0,0 @@
|
||||||
"""Intended for internal use only."""
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import ipaddress
|
|
||||||
|
|
||||||
# pylint: skip-file
|
|
||||||
|
|
||||||
if sys.version_info[0] == 2:
|
|
||||||
def compat_ip_address(address):
|
|
||||||
"""Intended for internal use only."""
|
|
||||||
if isinstance(address, bytes):
|
|
||||||
address = address.decode()
|
|
||||||
return ipaddress.ip_address(address)
|
|
||||||
else:
|
|
||||||
def compat_ip_address(address):
|
|
||||||
"""Intended for internal use only."""
|
|
||||||
return ipaddress.ip_address(address)
|
|
|
@ -1,199 +0,0 @@
|
||||||
"""
|
|
||||||
======================
|
|
||||||
GeoIP2 Database Reader
|
|
||||||
======================
|
|
||||||
|
|
||||||
"""
|
|
||||||
import inspect
|
|
||||||
|
|
||||||
import maxminddb
|
|
||||||
# pylint: disable=unused-import
|
|
||||||
from maxminddb import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
|
|
||||||
MODE_MEMORY)
|
|
||||||
|
|
||||||
import geoip2
|
|
||||||
import geoip2.models
|
|
||||||
import geoip2.errors
|
|
||||||
|
|
||||||
|
|
||||||
class Reader(object):
|
|
||||||
"""GeoIP2 database Reader object.
|
|
||||||
|
|
||||||
Instances of this class provide a reader for the GeoIP2 database format.
|
|
||||||
IP addresses can be looked up using the ``country`` and ``city`` methods.
|
|
||||||
|
|
||||||
The basic API for this class is the same for every database. First, you
|
|
||||||
create a reader object, specifying a file name. You then call the method
|
|
||||||
corresponding to the specific database, passing it the IP address you want
|
|
||||||
to look up.
|
|
||||||
|
|
||||||
If the request succeeds, the method call will return a model class for the
|
|
||||||
method you called. This model in turn contains multiple record classes,
|
|
||||||
each of which represents part of the data returned by the database. If the
|
|
||||||
database does not contain the requested information, the attributes on the
|
|
||||||
record class will have a ``None`` value.
|
|
||||||
|
|
||||||
If the address is not in the database, an
|
|
||||||
``geoip2.errors.AddressNotFoundError`` exception will be thrown. If the
|
|
||||||
database is corrupt or invalid, a ``maxminddb.InvalidDatabaseError`` will
|
|
||||||
be thrown.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, filename, locales=None, mode=MODE_AUTO):
|
|
||||||
"""Create GeoIP2 Reader.
|
|
||||||
|
|
||||||
:param filename: The path to the GeoIP2 database.
|
|
||||||
:param locales: This is list of locale codes. This argument will be
|
|
||||||
passed on to record classes to use when their name properties are
|
|
||||||
called. The default value is ['en'].
|
|
||||||
|
|
||||||
The order of the locales is significant. When a record class has
|
|
||||||
multiple names (country, city, etc.), its name property will return
|
|
||||||
the name in the first locale that has one.
|
|
||||||
|
|
||||||
Note that the only locale which is always present in the GeoIP2
|
|
||||||
data is "en". If you do not include this locale, the name property
|
|
||||||
may end up returning None even when the record has an English name.
|
|
||||||
|
|
||||||
Currently, the valid locale codes are:
|
|
||||||
|
|
||||||
* de -- German
|
|
||||||
* en -- English names may still include accented characters if that
|
|
||||||
is the accepted spelling in English. In other words, English does
|
|
||||||
not mean ASCII.
|
|
||||||
* es -- Spanish
|
|
||||||
* fr -- French
|
|
||||||
* ja -- Japanese
|
|
||||||
* pt-BR -- Brazilian Portuguese
|
|
||||||
* ru -- Russian
|
|
||||||
* zh-CN -- Simplified Chinese.
|
|
||||||
:param mode: The mode to open the database with. Valid mode are:
|
|
||||||
* MODE_MMAP_EXT - use the C extension with memory map.
|
|
||||||
* MODE_MMAP - read from memory map. Pure Python.
|
|
||||||
* MODE_FILE - read database as standard file. Pure Python.
|
|
||||||
* MODE_MEMORY - load database into memory. Pure Python.
|
|
||||||
* MODE_AUTO - try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order.
|
|
||||||
Default.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if locales is None:
|
|
||||||
locales = ['en']
|
|
||||||
self._db_reader = maxminddb.open_database(filename, mode)
|
|
||||||
self._locales = locales
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_value, traceback):
|
|
||||||
self.close()
|
|
||||||
|
|
||||||
def country(self, ip_address):
|
|
||||||
"""Get the Country object for the IP address.
|
|
||||||
|
|
||||||
:param ip_address: IPv4 or IPv6 address as a string.
|
|
||||||
|
|
||||||
:returns: :py:class:`geoip2.models.Country` object
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
return self._model_for(geoip2.models.Country, 'Country', ip_address)
|
|
||||||
|
|
||||||
def city(self, ip_address):
|
|
||||||
"""Get the City object for the IP address.
|
|
||||||
|
|
||||||
:param ip_address: IPv4 or IPv6 address as a string.
|
|
||||||
|
|
||||||
:returns: :py:class:`geoip2.models.City` object
|
|
||||||
|
|
||||||
"""
|
|
||||||
return self._model_for(geoip2.models.City, 'City', ip_address)
|
|
||||||
|
|
||||||
def anonymous_ip(self, ip_address):
|
|
||||||
"""Get the AnonymousIP object for the IP address.
|
|
||||||
|
|
||||||
:param ip_address: IPv4 or IPv6 address as a string.
|
|
||||||
|
|
||||||
:returns: :py:class:`geoip2.models.AnonymousIP` object
|
|
||||||
|
|
||||||
"""
|
|
||||||
return self._flat_model_for(geoip2.models.AnonymousIP,
|
|
||||||
'GeoIP2-Anonymous-IP', ip_address)
|
|
||||||
|
|
||||||
def connection_type(self, ip_address):
|
|
||||||
"""Get the ConnectionType object for the IP address.
|
|
||||||
|
|
||||||
:param ip_address: IPv4 or IPv6 address as a string.
|
|
||||||
|
|
||||||
:returns: :py:class:`geoip2.models.ConnectionType` object
|
|
||||||
|
|
||||||
"""
|
|
||||||
return self._flat_model_for(geoip2.models.ConnectionType,
|
|
||||||
'GeoIP2-Connection-Type', ip_address)
|
|
||||||
|
|
||||||
def domain(self, ip_address):
    """Look up the IP address in a GeoIP2 Domain database.

    :param ip_address: IPv4 or IPv6 address as a string.

    :returns: :py:class:`geoip2.models.Domain` object
    """
    return self._flat_model_for(
        geoip2.models.Domain, 'GeoIP2-Domain', ip_address)
|
|
||||||
|
|
||||||
def enterprise(self, ip_address):
    """Look up the IP address in a GeoIP2 Enterprise database.

    :param ip_address: IPv4 or IPv6 address as a string.

    :returns: :py:class:`geoip2.models.Enterprise` object
    """
    return self._model_for(
        geoip2.models.Enterprise, 'Enterprise', ip_address)
|
|
||||||
|
|
||||||
def isp(self, ip_address):
    """Look up the IP address in a GeoIP2 ISP database.

    :param ip_address: IPv4 or IPv6 address as a string.

    :returns: :py:class:`geoip2.models.ISP` object
    """
    return self._flat_model_for(geoip2.models.ISP, 'GeoIP2-ISP', ip_address)
|
|
||||||
|
|
||||||
def _get(self, database_type, ip_address):
    """Fetch the raw record for *ip_address*, validating the database type.

    Raises TypeError when the lookup method does not match the open
    database, and AddressNotFoundError when the address is absent.
    """
    # Guard against calling a lookup method on the wrong kind of database.
    actual_type = self.metadata().database_type
    if database_type not in actual_type:
        caller = inspect.stack()[2][3]
        raise TypeError("The %s method cannot be used with the "
                        "%s database" % (caller, actual_type))
    record = self._db_reader.get(ip_address)
    if record is None:
        raise geoip2.errors.AddressNotFoundError(
            "The address %s is not in the database." % ip_address)
    return record
|
|
||||||
|
|
||||||
def _model_for(self, model_class, types, ip_address):
    """Fetch the raw record and wrap it in a locale-aware model class."""
    raw = self._get(types, ip_address)
    # Inject the queried address so the model's traits can report it.
    raw.setdefault('traits', {})['ip_address'] = ip_address
    return model_class(raw, locales=self._locales)
|
|
||||||
|
|
||||||
def _flat_model_for(self, model_class, types, ip_address):
    """Fetch the raw record and wrap it in a flat (non-locale) model."""
    raw = self._get(types, ip_address)
    # Flat models carry the queried address at the top level.
    raw['ip_address'] = ip_address
    return model_class(raw)
|
|
||||||
|
|
||||||
def metadata(self):
    """Return the metadata for the open database.

    :returns: :py:class:`maxminddb.reader.Metadata` object
    """
    return self._db_reader.metadata()
|
|
||||||
|
|
||||||
def close(self):
    """Close the underlying MaxMind DB reader."""
    self._db_reader.close()
|
|
|
@ -1,51 +0,0 @@
|
||||||
"""
|
|
||||||
Errors
|
|
||||||
======
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class GeoIP2Error(RuntimeError):
    """Generic GeoIP2 error.

    Extends :py:exc:`RuntimeError` without adding any attributes; the
    other exceptions in this module derive from it.
    """
|
|
||||||
|
|
||||||
|
|
||||||
class AddressNotFoundError(GeoIP2Error):
    """Raised when the looked-up address is not found."""
|
|
||||||
|
|
||||||
|
|
||||||
class AuthenticationError(GeoIP2Error):
    """Raised when the request could not be authenticated."""
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPError(GeoIP2Error):
    """An error occurred while making an HTTP request.

    Represents an HTTP transport error; extends :py:exc:`GeoIP2Error`
    with request context.

    :ivar http_status: The HTTP status code returned
    :ivar uri: The URI queried
    """

    def __init__(self, message, http_status=None, uri=None):
        super(HTTPError, self).__init__(message)
        self.http_status = http_status
        self.uri = uri
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidRequestError(GeoIP2Error):
    """Raised when the request itself was invalid."""
|
|
||||||
|
|
||||||
|
|
||||||
class OutOfQueriesError(GeoIP2Error):
    """Raised when the account is out of funds for the queried service."""
|
|
||||||
|
|
||||||
|
|
||||||
class PermissionRequiredError(GeoIP2Error):
    """Raised when the account lacks permission for the queried service."""
|
|
|
@ -1,16 +0,0 @@
|
||||||
"""This package contains utility mixins"""
|
|
||||||
# pylint: disable=too-few-public-methods
|
|
||||||
from abc import ABCMeta
|
|
||||||
|
|
||||||
|
|
||||||
class SimpleEquality(object):
    """Mixin defining equality as same class plus identical ``__dict__``."""

    __metaclass__ = ABCMeta

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
|
|
|
@ -1,472 +0,0 @@
|
||||||
"""
|
|
||||||
Models
|
|
||||||
======
|
|
||||||
|
|
||||||
These classes provide models for the data returned by the GeoIP2
|
|
||||||
web service and databases.
|
|
||||||
|
|
||||||
The only difference between the City and Insights model classes is which
|
|
||||||
fields in each record may be populated. See
|
|
||||||
http://dev.maxmind.com/geoip/geoip2/web-services for more details.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# pylint: disable=too-many-instance-attributes,too-few-public-methods
|
|
||||||
from abc import ABCMeta
|
|
||||||
|
|
||||||
import geoip2.records
|
|
||||||
from geoip2.mixins import SimpleEquality
|
|
||||||
|
|
||||||
|
|
||||||
class Country(SimpleEquality):
    """Model for the GeoIP2 Precision: Country and the GeoIP2 Country database.

    Attributes:

    .. attribute:: continent

      :py:class:`geoip2.records.Continent` for the requested IP address.

    .. attribute:: country

      :py:class:`geoip2.records.Country` where MaxMind believes the IP
      is located.

    .. attribute:: maxmind

      :py:class:`geoip2.records.MaxMind` account information.

    .. attribute:: registered_country

      :py:class:`geoip2.records.Country` where the ISP registered the IP
      block; may differ from the user's country.

    .. attribute:: represented_country

      :py:class:`geoip2.records.RepresentedCountry` when the users of the
      IP represent a different country (e.g. an overseas military base).

    .. attribute:: traits

      :py:class:`geoip2.records.Traits` of the requested IP address.
    """

    def __init__(self, raw_response, locales=None):
        if locales is None:
            locales = ['en']
        self._locales = locales
        get = raw_response.get
        self.continent = geoip2.records.Continent(
            locales, **get('continent', {}))
        self.country = geoip2.records.Country(
            locales, **get('country', {}))
        self.registered_country = geoip2.records.Country(
            locales, **get('registered_country', {}))
        self.represented_country = geoip2.records.RepresentedCountry(
            locales, **get('represented_country', {}))
        self.maxmind = geoip2.records.MaxMind(**get('maxmind', {}))
        self.traits = geoip2.records.Traits(**get('traits', {}))
        # Keep the unprocessed response for __repr__ and debugging.
        self.raw = raw_response

    def __repr__(self):
        return '{module}.{class_name}({data}, {locales})'.format(
            module=self.__module__,
            class_name=self.__class__.__name__,
            data=self.raw,
            locales=self._locales)
|
|
||||||
|
|
||||||
|
|
||||||
class City(Country):
    """Model for the GeoIP2 Precision: City and the GeoIP2 City database.

    Extends :py:class:`Country` (``continent``, ``country``, ``maxmind``,
    ``registered_country``, ``represented_country``, ``traits``) with:

    .. attribute:: city

      :py:class:`geoip2.records.City` for the requested IP address.

    .. attribute:: location

      :py:class:`geoip2.records.Location` for the requested IP address.

    .. attribute:: postal

      :py:class:`geoip2.records.Postal` for the requested IP address.

    .. attribute:: subdivisions

      :py:class:`geoip2.records.Subdivisions` tuple of the subdivisions
      of the country containing the location.
    """

    def __init__(self, raw_response, locales=None):
        super(City, self).__init__(raw_response, locales)
        get = raw_response.get
        self.city = geoip2.records.City(locales, **get('city', {}))
        self.location = geoip2.records.Location(**get('location', {}))
        self.postal = geoip2.records.Postal(**get('postal', {}))
        self.subdivisions = geoip2.records.Subdivisions(
            locales, *get('subdivisions', []))
|
|
||||||
|
|
||||||
|
|
||||||
class Insights(City):
    """Model for the GeoIP2 Precision: Insights web service endpoint.

    Structurally identical to :py:class:`City` (``city``, ``continent``,
    ``country``, ``location``, ``maxmind``, ``postal``,
    ``registered_country``, ``represented_country``, ``subdivisions``,
    ``traits``); the Insights service may populate more of the record
    fields than the City database does.
    """
|
|
||||||
|
|
||||||
|
|
||||||
class Enterprise(City):
    """Model for the GeoIP2 Enterprise database.

    Structurally identical to :py:class:`City` (``city``, ``continent``,
    ``country``, ``location``, ``maxmind``, ``postal``,
    ``registered_country``, ``represented_country``, ``subdivisions``,
    ``traits``); the Enterprise database may populate additional
    confidence fields within those records.
    """
|
|
||||||
|
|
||||||
|
|
||||||
class SimpleModel(SimpleEquality):
    """Shared behavior for the flat (non-location) model classes."""

    __metaclass__ = ABCMeta

    def __repr__(self):
        # pylint: disable=no-member
        # Subclasses set self.raw in their constructors.
        return '{module}.{class_name}({data})'.format(
            module=self.__module__,
            class_name=self.__class__.__name__,
            data=str(self.raw))
|
|
||||||
|
|
||||||
|
|
||||||
class AnonymousIP(SimpleModel):
    """Model class for the GeoIP2 Anonymous IP.

    Attributes (all flags are bool, defaulting to False):

    .. attribute:: is_anonymous

      True if the IP address belongs to any sort of anonymous network.

    .. attribute:: is_anonymous_vpn

      True if the IP address belongs to an anonymous VPN system.

    .. attribute:: is_hosting_provider

      True if the IP address belongs to a hosting provider.

    .. attribute:: is_public_proxy

      True if the IP address belongs to a public proxy.

    .. attribute:: is_tor_exit_node

      True if the IP address is a Tor exit node.

    .. attribute:: ip_address

      The IP address used in the lookup (unicode).
    """

    def __init__(self, raw):
        # Every flag defaults to False when absent from the record.
        for flag in ('is_anonymous', 'is_anonymous_vpn',
                     'is_hosting_provider', 'is_public_proxy',
                     'is_tor_exit_node'):
            setattr(self, flag, raw.get(flag, False))
        self.ip_address = raw.get('ip_address')
        self.raw = raw
|
|
||||||
|
|
||||||
|
|
||||||
class ConnectionType(SimpleModel):
    """Model class for the GeoIP2 Connection-Type.

    Attributes:

    .. attribute:: connection_type

      One of ``Dialup``, ``Cable/DSL``, ``Corporate``, or ``Cellular``;
      additional values may be added in the future. (unicode)

    .. attribute:: ip_address

      The IP address used in the lookup. (unicode)
    """

    def __init__(self, raw):
        self.connection_type = raw.get('connection_type')
        self.ip_address = raw.get('ip_address')
        self.raw = raw
|
|
||||||
|
|
||||||
|
|
||||||
class Domain(SimpleModel):
    """Model class for the GeoIP2 Domain.

    Attributes:

    .. attribute:: domain

      The domain associated with the IP address. (unicode)

    .. attribute:: ip_address

      The IP address used in the lookup. (unicode)
    """

    def __init__(self, raw):
        self.domain = raw.get('domain')
        self.ip_address = raw.get('ip_address')
        self.raw = raw
|
|
||||||
|
|
||||||
|
|
||||||
class ISP(SimpleModel):
    """Model class for the GeoIP2 ISP.

    Attributes:

    .. attribute:: autonomous_system_number

      The autonomous system number associated with the IP address. (int)

    .. attribute:: autonomous_system_organization

      The organization associated with the registered autonomous system
      number for the IP address. (unicode)

    .. attribute:: isp

      The name of the ISP associated with the IP address. (unicode)

    .. attribute:: organization

      The name of the organization associated with the IP address. (unicode)

    .. attribute:: ip_address

      The IP address used in the lookup. (unicode)
    """

    # NOTE: the upstream "# pylint:disable=too-many-arguments" comment was
    # stale (the constructor takes a single dict) and has been removed.
    def __init__(self, raw):
        self.autonomous_system_number = raw.get('autonomous_system_number')
        self.autonomous_system_organization = raw.get(
            'autonomous_system_organization')
        self.isp = raw.get('isp')
        self.organization = raw.get('organization')
        self.ip_address = raw.get('ip_address')
        self.raw = raw
|
|
|
@ -1,605 +0,0 @@
|
||||||
"""
|
|
||||||
|
|
||||||
Records
|
|
||||||
=======
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
# pylint:disable=R0903
|
|
||||||
from abc import ABCMeta
|
|
||||||
|
|
||||||
from geoip2.mixins import SimpleEquality
|
|
||||||
|
|
||||||
|
|
||||||
class Record(SimpleEquality):
    """Abstract base class for all record types."""

    __metaclass__ = ABCMeta

    # Subclasses list the attribute names they accept.
    _valid_attributes = set()

    def __init__(self, **kwargs):
        # Keep only declared attributes; unknown keyword args are dropped
        # and missing ones default to None.
        self.__dict__.update(
            {name: kwargs.get(name) for name in self._valid_attributes})

    def __setattr__(self, name, value):
        # Records are immutable after construction.
        raise AttributeError("can't set attribute")

    def __repr__(self):
        args = ', '.join('%s=%r' % pair for pair in self.__dict__.items())
        return '{module}.{class_name}({data})'.format(
            module=self.__module__,
            class_name=self.__class__.__name__,
            data=args)
|
|
||||||
|
|
||||||
|
|
||||||
class PlaceRecord(Record):
    """Abstract base for records that carry localized :py:attr:`names`."""

    __metaclass__ = ABCMeta

    def __init__(self, locales=None, **kwargs):
        if locales is None:
            locales = ['en']
        if kwargs.get('names') is None:
            kwargs['names'] = {}
        # Bypass Record.__setattr__, which forbids ordinary assignment.
        object.__setattr__(self, '_locales', locales)
        super(PlaceRecord, self).__init__(**kwargs)

    @property
    def name(self):
        """The localized name for the first matching locale, or None.

        Scans the locales passed to the constructor in order and returns
        the value from :py:attr:`names` for the first locale present.
        (The upstream docstring incorrectly described this as a dict.)
        """
        # pylint:disable=E1101
        return next(
            (self.names.get(x) for x in self._locales if x in self.names),
            None)
|
|
||||||
|
|
||||||
|
|
||||||
class City(PlaceRecord):
    """City-level data associated with an IP address.

    Returned by ``city``, ``enterprise``, and ``insights``.

    Attributes:

    .. attribute:: confidence

      0-100 confidence that the city is correct; Insights end point and
      GeoIP2 Enterprise database only. (int)

    .. attribute:: geoname_id

      The GeoName ID for the city. (int)

    .. attribute:: name

      The city name for the locales list passed to the constructor.
      (unicode)

    .. attribute:: names

      Dict mapping locale codes to names. (dict)
    """

    _valid_attributes = {'confidence', 'geoname_id', 'names'}
|
|
||||||
|
|
||||||
|
|
||||||
class Continent(PlaceRecord):
    """Continent-level data associated with an IP address.

    Attributes:

    .. attribute:: code

      Two-character continent code, e.g. "NA" (North America) or
      "OC" (Oceania). (unicode)

    .. attribute:: geoname_id

      The GeoName ID for the continent. (int)

    .. attribute:: name

      The continent name for the locales list passed to the constructor.
      (unicode)

    .. attribute:: names

      Dict mapping locale codes to names. (dict)
    """

    _valid_attributes = {'code', 'geoname_id', 'names'}
|
|
||||||
|
|
||||||
|
|
||||||
class Country(PlaceRecord):
    """Country-level data associated with an IP address.

    Attributes:

    .. attribute:: confidence

      0-100 confidence that the country is correct; Insights end point
      and GeoIP2 Enterprise database only. (int)

    .. attribute:: geoname_id

      The GeoName ID for the country. (int)

    .. attribute:: iso_code

      The two-character `ISO 3166-1
      <http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the
      country. (unicode)

    .. attribute:: name

      The country name for the locales list passed to the constructor.
      (unicode)

    .. attribute:: names

      Dict mapping locale codes to names. (dict)
    """

    _valid_attributes = {'confidence', 'geoname_id', 'iso_code', 'names'}
|
|
||||||
|
|
||||||
|
|
||||||
class RepresentedCountry(Country):
    """Country represented by the users of an IP address.

    Country-level data for the IP's represented country — the country
    represented by something like a military base. Extends
    :py:class:`Country` (``confidence``, ``geoname_id``, ``iso_code``,
    ``name``, ``names``) with:

    .. attribute:: type

      The type of entity representing the country. Currently only
      ``military`` is returned, but other types may be added in the
      future. (unicode)
    """

    _valid_attributes = {'confidence', 'geoname_id', 'iso_code', 'names',
                         'type'}
|
|
||||||
|
|
||||||
|
|
||||||
class Location(Record):
    """Location data associated with an IP address.

    Returned by ``city``, ``enterprise``, and ``insights``.

    Attributes:

    .. attribute:: average_income

      Average income in US dollars; Insights end point only. (int)

    .. attribute:: accuracy_radius

      Radius in kilometers around the location where the IP address is
      likely to be. (int)

    .. attribute:: latitude

      Approximate latitude. Not precise; do not use to identify a
      particular address or household. (float)

    .. attribute:: longitude

      Approximate longitude. Not precise; do not use to identify a
      particular address or household. (float)

    .. attribute:: metro_code

      Metro code for US locations; matches the codes used by the
      `Google AdWords API
      <https://developers.google.com/adwords/api/docs/appendix/cities-DMAregions>`_.
      (int)

    .. attribute:: population_density

      Estimated population per square kilometer; Insights end point
      only. (int)

    .. attribute:: time_zone

      IANA time zone name (see the `IANA Time Zone Database
      <http://www.iana.org/time-zones>`_), e.g. "America/New_York".
      (unicode)
    """

    _valid_attributes = {'average_income', 'accuracy_radius', 'latitude',
                         'longitude', 'metro_code', 'population_density',
                         'postal_code', 'postal_confidence', 'time_zone'}
|
|
||||||
|
|
||||||
|
|
||||||
class MaxMind(Record):
    """Data related to your MaxMind account.

    Attributes:

    .. attribute:: queries_remaining

      The number of queries remaining for the end point being called.
      (int)
    """

    _valid_attributes = {'queries_remaining'}
|
|
||||||
|
|
||||||
|
|
||||||
class Postal(Record):
    """Postal data associated with an IP address.

    Returned by ``city``, ``enterprise``, and ``insights``.

    Attributes:

    .. attribute:: code

      The postal code of the location. Not available for all countries;
      in some countries only part of the postal code is returned.
      (unicode)

    .. attribute:: confidence

      0-100 confidence that the postal code is correct; Insights end
      point and GeoIP2 Enterprise database only. (int)
    """

    _valid_attributes = {'code', 'confidence'}
|
|
||||||
|
|
||||||
|
|
||||||
class Subdivision(PlaceRecord):
|
|
||||||
"""Contains data for the subdivisions associated with an IP address.
|
|
||||||
|
|
||||||
This class contains the subdivision data associated with an IP address.
|
|
||||||
|
|
||||||
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
|
|
||||||
.. attribute:: confidence
|
|
||||||
|
|
||||||
This is a value from 0-100 indicating MaxMind's
|
|
||||||
confidence that the subdivision is correct. This attribute is only
|
|
||||||
available from the Insights end point and the GeoIP2 Enterprise
|
|
||||||
database.
|
|
||||||
|
|
||||||
:type: int
|
|
||||||
|
|
||||||
.. attribute:: geoname_id
|
|
||||||
|
|
||||||
This is a GeoName ID for the subdivision.
|
|
||||||
|
|
||||||
:type: int
|
|
||||||
|
|
||||||
.. attribute:: iso_code
|
|
||||||
|
|
||||||
This is a string up to three characters long
|
|
||||||
contain the subdivision portion of the `ISO 3166-2 code
|
|
||||||
<http://en.wikipedia.org/wiki/ISO_3166-2>`_.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
.. attribute:: name
|
|
||||||
|
|
||||||
The name of the subdivision based on the locales list passed to the
|
|
||||||
constructor.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
.. attribute:: names
|
|
||||||
|
|
||||||
A dictionary where the keys are locale codes and the
|
|
||||||
values are names
|
|
||||||
|
|
||||||
:type: dict
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
_valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
|
|
||||||
|
|
||||||
|
|
||||||
class Subdivisions(tuple):
|
|
||||||
"""A tuple-like collection of subdivisions associated with an IP address.
|
|
||||||
|
|
||||||
This class contains the subdivisions of the country associated with the
|
|
||||||
IP address from largest to smallest.
|
|
||||||
|
|
||||||
For instance, the response for Oxford in the United Kingdom would have
|
|
||||||
England as the first element and Oxfordshire as the second element.
|
|
||||||
|
|
||||||
This attribute is returned by ``city``, ``enterprise``, and ``insights``.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __new__(cls, locales, *subdivisions):
|
|
||||||
subdivisions = [Subdivision(locales, **x) for x in subdivisions]
|
|
||||||
obj = super(cls, Subdivisions).__new__(cls, subdivisions)
|
|
||||||
return obj
|
|
||||||
|
|
||||||
def __init__(self, locales, *subdivisions): # pylint:disable=W0613
|
|
||||||
self._locales = locales
|
|
||||||
super(Subdivisions, self).__init__()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def most_specific(self):
|
|
||||||
"""The most specific (smallest) subdivision available.
|
|
||||||
|
|
||||||
If there are no :py:class:`Subdivision` objects for the response,
|
|
||||||
this returns an empty :py:class:`Subdivision`.
|
|
||||||
|
|
||||||
:type: :py:class:`Subdivision`
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return self[-1]
|
|
||||||
except IndexError:
|
|
||||||
return Subdivision(self._locales)
|
|
||||||
|
|
||||||
|
|
||||||
class Traits(Record):
|
|
||||||
"""Contains data for the traits record associated with an IP address.
|
|
||||||
|
|
||||||
This class contains the traits data associated with an IP address.
|
|
||||||
|
|
||||||
This class has the following attributes:
|
|
||||||
|
|
||||||
|
|
||||||
.. attribute:: autonomous_system_number
|
|
||||||
|
|
||||||
The `autonomous system
|
|
||||||
number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_
|
|
||||||
associated with the IP address. This attribute is only available from
|
|
||||||
the City and Insights web service end points and the GeoIP2 Enterprise
|
|
||||||
database.
|
|
||||||
|
|
||||||
:type: int
|
|
||||||
|
|
||||||
.. attribute:: autonomous_system_organization
|
|
||||||
|
|
||||||
The organization associated with the registered `autonomous system
|
|
||||||
number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_ for
|
|
||||||
the IP address. This attribute is only available from the City and
|
|
||||||
Insights web service end points and the GeoIP2 Enterprise database.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
.. attribute:: connection_type
|
|
||||||
|
|
||||||
The connection type may take the following values:
|
|
||||||
|
|
||||||
- Dialup
|
|
||||||
- Cable/DSL
|
|
||||||
- Corporate
|
|
||||||
- Cellular
|
|
||||||
|
|
||||||
Additional values may be added in the future.
|
|
||||||
|
|
||||||
This attribute is only available in the GeoIP2 Enterprise database.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
.. attribute:: domain
|
|
||||||
|
|
||||||
The second level domain associated with the
|
|
||||||
IP address. This will be something like "example.com" or
|
|
||||||
"example.co.uk", not "foo.example.com". This attribute is only available
|
|
||||||
from the City and Insights web service end points and the GeoIP2
|
|
||||||
Enterprise database.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
.. attribute:: ip_address
|
|
||||||
|
|
||||||
The IP address that the data in the model
|
|
||||||
is for. If you performed a "me" lookup against the web service, this
|
|
||||||
will be the externally routable IP address for the system the code is
|
|
||||||
running on. If the system is behind a NAT, this may differ from the IP
|
|
||||||
address locally assigned to it.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
.. attribute:: is_anonymous_proxy
|
|
||||||
|
|
||||||
This is true if the IP is an anonymous
|
|
||||||
proxy. See http://dev.maxmind.com/faq/geoip#anonproxy for further
|
|
||||||
details.
|
|
||||||
|
|
||||||
:type: bool
|
|
||||||
|
|
||||||
.. deprecated:: 2.2.0
|
|
||||||
Use our our `GeoIP2 Anonymous IP database
|
|
||||||
<https://www.maxmind.com/en/geoip2-anonymous-ip-database GeoIP2>`_
|
|
||||||
instead.
|
|
||||||
|
|
||||||
.. attribute:: is_legitimate_proxy
|
|
||||||
|
|
||||||
This attribute is true if MaxMind believes this IP address to be a
|
|
||||||
legitimate proxy, such as an internal VPN used by a corporation. This
|
|
||||||
attribute is only available in the GeoIP2 Enterprise database.
|
|
||||||
|
|
||||||
:type: bool
|
|
||||||
|
|
||||||
.. attribute:: is_satellite_provider
|
|
||||||
|
|
||||||
This is true if the IP address is from a satellite provider that
|
|
||||||
provides service to multiple countries.
|
|
||||||
|
|
||||||
:type: bool
|
|
||||||
|
|
||||||
.. deprecated:: 2.2.0
|
|
||||||
Due to the increased coverage by mobile carriers, very few
|
|
||||||
satellite providers now serve multiple countries. As a result, the
|
|
||||||
output does not provide sufficiently relevant data for us to maintain
|
|
||||||
it.
|
|
||||||
|
|
||||||
.. attribute:: isp
|
|
||||||
|
|
||||||
The name of the ISP associated with the IP address. This attribute is
|
|
||||||
only available from the City and Insights web service end points and the
|
|
||||||
GeoIP2 Enterprise database.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
.. attribute:: organization
|
|
||||||
|
|
||||||
The name of the organization associated with the IP address. This
|
|
||||||
attribute is only available from the City and Insights web service end
|
|
||||||
points and the GeoIP2 Enterprise database.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
.. attribute:: user_type
|
|
||||||
|
|
||||||
The user type associated with the IP
|
|
||||||
address. This can be one of the following values:
|
|
||||||
|
|
||||||
* business
|
|
||||||
* cafe
|
|
||||||
* cellular
|
|
||||||
* college
|
|
||||||
* content_delivery_network
|
|
||||||
* dialup
|
|
||||||
* government
|
|
||||||
* hosting
|
|
||||||
* library
|
|
||||||
* military
|
|
||||||
* residential
|
|
||||||
* router
|
|
||||||
* school
|
|
||||||
* search_engine_spider
|
|
||||||
* traveler
|
|
||||||
|
|
||||||
This attribute is only available from the Insights end point and the
|
|
||||||
GeoIP2 Enterprise database.
|
|
||||||
|
|
||||||
:type: unicode
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
_valid_attributes = set(
|
|
||||||
['autonomous_system_number', 'autonomous_system_organization',
|
|
||||||
'connection_type', 'domain', 'is_anonymous_proxy',
|
|
||||||
'is_legitimate_proxy', 'is_satellite_provider', 'isp', 'ip_address',
|
|
||||||
'organization', 'user_type'])
|
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
|
||||||
for k in ['is_anonymous_proxy', 'is_legitimate_proxy',
|
|
||||||
'is_satellite_provider']:
|
|
||||||
kwargs[k] = bool(kwargs.get(k, False))
|
|
||||||
super(Traits, self).__init__(**kwargs)
|
|
|
@ -1,219 +0,0 @@
|
||||||
"""
|
|
||||||
============================
|
|
||||||
WebServices Client API
|
|
||||||
============================
|
|
||||||
|
|
||||||
This class provides a client API for all the GeoIP2 Precision web service end
|
|
||||||
points. The end points are Country, City, and Insights. Each end point returns
|
|
||||||
a different set of data about an IP address, with Country returning the least
|
|
||||||
data and Insights the most.
|
|
||||||
|
|
||||||
Each web service end point is represented by a different model class, and
|
|
||||||
these model classes in turn contain multiple record classes. The record
|
|
||||||
classes have attributes which contain data about the IP address.
|
|
||||||
|
|
||||||
If the web service does not return a particular piece of data for an IP
|
|
||||||
address, the associated attribute is not populated.
|
|
||||||
|
|
||||||
The web service may not return any information for an entire record, in which
|
|
||||||
case all of the attributes for that record class will be empty.
|
|
||||||
|
|
||||||
SSL
|
|
||||||
---
|
|
||||||
|
|
||||||
Requests to the GeoIP2 Precision web service are always made with SSL.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from requests.utils import default_user_agent
|
|
||||||
|
|
||||||
import geoip2
|
|
||||||
import geoip2.models
|
|
||||||
|
|
||||||
from .compat import compat_ip_address
|
|
||||||
|
|
||||||
from .errors import (AddressNotFoundError, AuthenticationError, GeoIP2Error,
|
|
||||||
HTTPError, InvalidRequestError, OutOfQueriesError,
|
|
||||||
PermissionRequiredError)
|
|
||||||
|
|
||||||
|
|
||||||
class Client(object):
|
|
||||||
"""Creates a new client object.
|
|
||||||
|
|
||||||
It accepts the following required arguments:
|
|
||||||
|
|
||||||
:param user_id: Your MaxMind User ID.
|
|
||||||
:param license_key: Your MaxMind license key.
|
|
||||||
|
|
||||||
Go to https://www.maxmind.com/en/my_license_key to see your MaxMind
|
|
||||||
User ID and license key.
|
|
||||||
|
|
||||||
The following keyword arguments are also accepted:
|
|
||||||
|
|
||||||
:param host: The hostname to make a request against. This defaults to
|
|
||||||
"geoip.maxmind.com". In most cases, you should not need to set this
|
|
||||||
explicitly.
|
|
||||||
:param locales: This is list of locale codes. This argument will be
|
|
||||||
passed on to record classes to use when their name properties are
|
|
||||||
called. The default value is ['en'].
|
|
||||||
|
|
||||||
The order of the locales is significant. When a record class has
|
|
||||||
multiple names (country, city, etc.), its name property will return
|
|
||||||
the name in the first locale that has one.
|
|
||||||
|
|
||||||
Note that the only locale which is always present in the GeoIP2
|
|
||||||
data is "en". If you do not include this locale, the name property
|
|
||||||
may end up returning None even when the record has an English name.
|
|
||||||
|
|
||||||
Currently, the valid locale codes are:
|
|
||||||
|
|
||||||
* de -- German
|
|
||||||
* en -- English names may still include accented characters if that is
|
|
||||||
the accepted spelling in English. In other words, English does not
|
|
||||||
mean ASCII.
|
|
||||||
* es -- Spanish
|
|
||||||
* fr -- French
|
|
||||||
* ja -- Japanese
|
|
||||||
* pt-BR -- Brazilian Portuguese
|
|
||||||
* ru -- Russian
|
|
||||||
* zh-CN -- Simplified Chinese.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self,
|
|
||||||
user_id,
|
|
||||||
license_key,
|
|
||||||
host='geoip.maxmind.com',
|
|
||||||
locales=None,
|
|
||||||
timeout=None):
|
|
||||||
"""Construct a Client."""
|
|
||||||
# pylint: disable=too-many-arguments
|
|
||||||
if locales is None:
|
|
||||||
locales = ['en']
|
|
||||||
self._locales = locales
|
|
||||||
self._user_id = user_id
|
|
||||||
self._license_key = license_key
|
|
||||||
self._base_uri = 'https://%s/geoip/v2.1' % host
|
|
||||||
self._timeout = timeout
|
|
||||||
|
|
||||||
def city(self, ip_address='me'):
|
|
||||||
"""Call GeoIP2 Precision City endpoint with the specified IP.
|
|
||||||
|
|
||||||
:param ip_address: IPv4 or IPv6 address as a string. If no
|
|
||||||
address is provided, the address that the web service is
|
|
||||||
called from will be used.
|
|
||||||
|
|
||||||
:returns: :py:class:`geoip2.models.City` object
|
|
||||||
|
|
||||||
"""
|
|
||||||
return self._response_for('city', geoip2.models.City, ip_address)
|
|
||||||
|
|
||||||
def country(self, ip_address='me'):
|
|
||||||
"""Call the GeoIP2 Country endpoint with the specified IP.
|
|
||||||
|
|
||||||
:param ip_address: IPv4 or IPv6 address as a string. If no address
|
|
||||||
is provided, the address that the web service is called from will
|
|
||||||
be used.
|
|
||||||
|
|
||||||
:returns: :py:class:`geoip2.models.Country` object
|
|
||||||
|
|
||||||
"""
|
|
||||||
return self._response_for('country', geoip2.models.Country, ip_address)
|
|
||||||
|
|
||||||
def insights(self, ip_address='me'):
|
|
||||||
"""Call the GeoIP2 Precision: Insights endpoint with the specified IP.
|
|
||||||
|
|
||||||
:param ip_address: IPv4 or IPv6 address as a string. If no address
|
|
||||||
is provided, the address that the web service is called from will
|
|
||||||
be used.
|
|
||||||
|
|
||||||
:returns: :py:class:`geoip2.models.Insights` object
|
|
||||||
|
|
||||||
"""
|
|
||||||
return self._response_for('insights', geoip2.models.Insights,
|
|
||||||
ip_address)
|
|
||||||
|
|
||||||
def _response_for(self, path, model_class, ip_address):
|
|
||||||
if ip_address != 'me':
|
|
||||||
ip_address = str(compat_ip_address(ip_address))
|
|
||||||
uri = '/'.join([self._base_uri, path, ip_address])
|
|
||||||
response = requests.get(uri,
|
|
||||||
auth=(self._user_id, self._license_key),
|
|
||||||
headers={'Accept': 'application/json',
|
|
||||||
'User-Agent': self._user_agent()},
|
|
||||||
timeout=self._timeout)
|
|
||||||
if response.status_code == 200:
|
|
||||||
body = self._handle_success(response, uri)
|
|
||||||
return model_class(body, locales=self._locales)
|
|
||||||
else:
|
|
||||||
self._handle_error(response, uri)
|
|
||||||
|
|
||||||
def _user_agent(self):
|
|
||||||
return 'GeoIP2 Python Client v%s (%s)' % (geoip2.__version__,
|
|
||||||
default_user_agent())
|
|
||||||
|
|
||||||
def _handle_success(self, response, uri):
|
|
||||||
try:
|
|
||||||
return response.json()
|
|
||||||
except ValueError as ex:
|
|
||||||
raise GeoIP2Error('Received a 200 response for %(uri)s'
|
|
||||||
' but could not decode the response as '
|
|
||||||
'JSON: ' % locals() + ', '.join(ex.args), 200,
|
|
||||||
uri)
|
|
||||||
|
|
||||||
def _handle_error(self, response, uri):
|
|
||||||
status = response.status_code
|
|
||||||
|
|
||||||
if 400 <= status < 500:
|
|
||||||
self._handle_4xx_status(response, status, uri)
|
|
||||||
elif 500 <= status < 600:
|
|
||||||
self._handle_5xx_status(status, uri)
|
|
||||||
else:
|
|
||||||
self._handle_non_200_status(status, uri)
|
|
||||||
|
|
||||||
def _handle_4xx_status(self, response, status, uri):
|
|
||||||
if not response.content:
|
|
||||||
raise HTTPError('Received a %(status)i error for %(uri)s '
|
|
||||||
'with no body.' % locals(), status, uri)
|
|
||||||
elif response.headers['Content-Type'].find('json') == -1:
|
|
||||||
raise HTTPError('Received a %i for %s with the following '
|
|
||||||
'body: %s' % (status, uri, response.content),
|
|
||||||
status, uri)
|
|
||||||
try:
|
|
||||||
body = response.json()
|
|
||||||
except ValueError as ex:
|
|
||||||
raise HTTPError(
|
|
||||||
'Received a %(status)i error for %(uri)s but it did'
|
|
||||||
' not include the expected JSON body: ' % locals() +
|
|
||||||
', '.join(ex.args), status, uri)
|
|
||||||
else:
|
|
||||||
if 'code' in body and 'error' in body:
|
|
||||||
self._handle_web_service_error(
|
|
||||||
body.get('error'), body.get('code'), status, uri)
|
|
||||||
else:
|
|
||||||
raise HTTPError(
|
|
||||||
'Response contains JSON but it does not specify '
|
|
||||||
'code or error keys', status, uri)
|
|
||||||
|
|
||||||
def _handle_web_service_error(self, message, code, status, uri):
|
|
||||||
if code in ('IP_ADDRESS_NOT_FOUND', 'IP_ADDRESS_RESERVED'):
|
|
||||||
raise AddressNotFoundError(message)
|
|
||||||
elif code in ('AUTHORIZATION_INVALID', 'LICENSE_KEY_REQUIRED',
|
|
||||||
'USER_ID_REQUIRED', 'USER_ID_UNKNOWN'):
|
|
||||||
raise AuthenticationError(message)
|
|
||||||
elif code in ('INSUFFICIENT_FUNDS', 'OUT_OF_QUERIES'):
|
|
||||||
raise OutOfQueriesError(message)
|
|
||||||
elif code == 'PERMISSION_REQUIRED':
|
|
||||||
raise PermissionRequiredError(message)
|
|
||||||
|
|
||||||
raise InvalidRequestError(message, code, status, uri)
|
|
||||||
|
|
||||||
def _handle_5xx_status(self, status, uri):
|
|
||||||
raise HTTPError('Received a server error (%(status)i) for '
|
|
||||||
'%(uri)s' % locals(), status, uri)
|
|
||||||
|
|
||||||
def _handle_non_200_status(self, status, uri):
|
|
||||||
raise HTTPError('Received a very surprising HTTP status '
|
|
||||||
'(%(status)i) for %(uri)s' % locals(), status, uri)
|
|
|
@ -1,46 +0,0 @@
|
||||||
# pylint:disable=C0111
|
|
||||||
import os
|
|
||||||
|
|
||||||
import maxminddb.reader
|
|
||||||
|
|
||||||
try:
|
|
||||||
import maxminddb.extension
|
|
||||||
except ImportError:
|
|
||||||
maxminddb.extension = None
|
|
||||||
|
|
||||||
from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
|
|
||||||
MODE_MEMORY)
|
|
||||||
from maxminddb.decoder import InvalidDatabaseError
|
|
||||||
|
|
||||||
|
|
||||||
def open_database(database, mode=MODE_AUTO):
|
|
||||||
"""Open a Maxmind DB database
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
database -- A path to a valid MaxMind DB file such as a GeoIP2
|
|
||||||
database file.
|
|
||||||
mode -- mode to open the database with. Valid mode are:
|
|
||||||
* MODE_MMAP_EXT - use the C extension with memory map.
|
|
||||||
* MODE_MMAP - read from memory map. Pure Python.
|
|
||||||
* MODE_FILE - read database as standard file. Pure Python.
|
|
||||||
* MODE_MEMORY - load database into memory. Pure Python.
|
|
||||||
* MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that
|
|
||||||
order. Default mode.
|
|
||||||
"""
|
|
||||||
if (mode == MODE_AUTO and maxminddb.extension and
|
|
||||||
hasattr(maxminddb.extension, 'Reader')) or mode == MODE_MMAP_EXT:
|
|
||||||
return maxminddb.extension.Reader(database)
|
|
||||||
elif mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY):
|
|
||||||
return maxminddb.reader.Reader(database, mode)
|
|
||||||
raise ValueError('Unsupported open mode: {0}'.format(mode))
|
|
||||||
|
|
||||||
|
|
||||||
def Reader(database): # pylint: disable=invalid-name
|
|
||||||
"""This exists for backwards compatibility. Use open_database instead"""
|
|
||||||
return open_database(database)
|
|
||||||
|
|
||||||
__title__ = 'maxminddb'
|
|
||||||
__version__ = '1.2.1'
|
|
||||||
__author__ = 'Gregory Oschwald'
|
|
||||||
__license__ = 'Apache License, Version 2.0'
|
|
||||||
__copyright__ = 'Copyright 2014 Maxmind, Inc.'
|
|
|
@ -1,33 +0,0 @@
|
||||||
import sys
|
|
||||||
|
|
||||||
import ipaddress
|
|
||||||
|
|
||||||
# pylint: skip-file
|
|
||||||
|
|
||||||
if sys.version_info[0] == 2:
|
|
||||||
def compat_ip_address(address):
|
|
||||||
if isinstance(address, bytes):
|
|
||||||
address = address.decode()
|
|
||||||
return ipaddress.ip_address(address)
|
|
||||||
|
|
||||||
int_from_byte = ord
|
|
||||||
|
|
||||||
FileNotFoundError = IOError
|
|
||||||
|
|
||||||
def int_from_bytes(b):
|
|
||||||
if b:
|
|
||||||
return int(b.encode("hex"), 16)
|
|
||||||
return 0
|
|
||||||
|
|
||||||
byte_from_int = chr
|
|
||||||
else:
|
|
||||||
def compat_ip_address(address):
|
|
||||||
return ipaddress.ip_address(address)
|
|
||||||
|
|
||||||
int_from_byte = lambda x: x
|
|
||||||
|
|
||||||
FileNotFoundError = FileNotFoundError
|
|
||||||
|
|
||||||
int_from_bytes = lambda x: int.from_bytes(x, 'big')
|
|
||||||
|
|
||||||
byte_from_int = lambda x: bytes([x])
|
|
|
@ -1,7 +0,0 @@
|
||||||
"""Constants used in the API"""
|
|
||||||
|
|
||||||
MODE_AUTO = 0
|
|
||||||
MODE_MMAP_EXT = 1
|
|
||||||
MODE_MMAP = 2
|
|
||||||
MODE_FILE = 4
|
|
||||||
MODE_MEMORY = 8
|
|
|
@ -1,173 +0,0 @@
|
||||||
"""
|
|
||||||
maxminddb.decoder
|
|
||||||
~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
This package contains code for decoding the MaxMind DB data section.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import struct
|
|
||||||
|
|
||||||
from maxminddb.compat import byte_from_int, int_from_bytes
|
|
||||||
from maxminddb.errors import InvalidDatabaseError
|
|
||||||
|
|
||||||
|
|
||||||
class Decoder(object): # pylint: disable=too-few-public-methods
|
|
||||||
|
|
||||||
"""Decoder for the data section of the MaxMind DB"""
|
|
||||||
|
|
||||||
def __init__(self, database_buffer, pointer_base=0, pointer_test=False):
|
|
||||||
"""Created a Decoder for a MaxMind DB
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
database_buffer -- an mmap'd MaxMind DB file.
|
|
||||||
pointer_base -- the base number to use when decoding a pointer
|
|
||||||
pointer_test -- used for internal unit testing of pointer code
|
|
||||||
"""
|
|
||||||
self._pointer_test = pointer_test
|
|
||||||
self._buffer = database_buffer
|
|
||||||
self._pointer_base = pointer_base
|
|
||||||
|
|
||||||
def _decode_array(self, size, offset):
|
|
||||||
array = []
|
|
||||||
for _ in range(size):
|
|
||||||
(value, offset) = self.decode(offset)
|
|
||||||
array.append(value)
|
|
||||||
return array, offset
|
|
||||||
|
|
||||||
def _decode_boolean(self, size, offset):
|
|
||||||
return size != 0, offset
|
|
||||||
|
|
||||||
def _decode_bytes(self, size, offset):
|
|
||||||
new_offset = offset + size
|
|
||||||
return self._buffer[offset:new_offset], new_offset
|
|
||||||
|
|
||||||
# pylint: disable=no-self-argument
|
|
||||||
# |-> I am open to better ways of doing this as long as it doesn't involve
|
|
||||||
# lots of code duplication.
|
|
||||||
def _decode_packed_type(type_code, type_size, pad=False):
|
|
||||||
# pylint: disable=protected-access, missing-docstring
|
|
||||||
def unpack_type(self, size, offset):
|
|
||||||
if not pad:
|
|
||||||
self._verify_size(size, type_size)
|
|
||||||
new_offset = offset + type_size
|
|
||||||
packed_bytes = self._buffer[offset:new_offset]
|
|
||||||
if pad:
|
|
||||||
packed_bytes = packed_bytes.rjust(type_size, b'\x00')
|
|
||||||
(value,) = struct.unpack(type_code, packed_bytes)
|
|
||||||
return value, new_offset
|
|
||||||
return unpack_type
|
|
||||||
|
|
||||||
def _decode_map(self, size, offset):
|
|
||||||
container = {}
|
|
||||||
for _ in range(size):
|
|
||||||
(key, offset) = self.decode(offset)
|
|
||||||
(value, offset) = self.decode(offset)
|
|
||||||
container[key] = value
|
|
||||||
return container, offset
|
|
||||||
|
|
||||||
_pointer_value_offset = {
|
|
||||||
1: 0,
|
|
||||||
2: 2048,
|
|
||||||
3: 526336,
|
|
||||||
4: 0,
|
|
||||||
}
|
|
||||||
|
|
||||||
def _decode_pointer(self, size, offset):
|
|
||||||
pointer_size = ((size >> 3) & 0x3) + 1
|
|
||||||
new_offset = offset + pointer_size
|
|
||||||
pointer_bytes = self._buffer[offset:new_offset]
|
|
||||||
packed = pointer_bytes if pointer_size == 4 else struct.pack(
|
|
||||||
b'!c', byte_from_int(size & 0x7)) + pointer_bytes
|
|
||||||
unpacked = int_from_bytes(packed)
|
|
||||||
pointer = unpacked + self._pointer_base + \
|
|
||||||
self._pointer_value_offset[pointer_size]
|
|
||||||
if self._pointer_test:
|
|
||||||
return pointer, new_offset
|
|
||||||
(value, _) = self.decode(pointer)
|
|
||||||
return value, new_offset
|
|
||||||
|
|
||||||
def _decode_uint(self, size, offset):
|
|
||||||
new_offset = offset + size
|
|
||||||
uint_bytes = self._buffer[offset:new_offset]
|
|
||||||
return int_from_bytes(uint_bytes), new_offset
|
|
||||||
|
|
||||||
def _decode_utf8_string(self, size, offset):
|
|
||||||
new_offset = offset + size
|
|
||||||
return self._buffer[offset:new_offset].decode('utf-8'), new_offset
|
|
||||||
|
|
||||||
_type_decoder = {
|
|
||||||
1: _decode_pointer,
|
|
||||||
2: _decode_utf8_string,
|
|
||||||
3: _decode_packed_type(b'!d', 8), # double,
|
|
||||||
4: _decode_bytes,
|
|
||||||
5: _decode_uint, # uint16
|
|
||||||
6: _decode_uint, # uint32
|
|
||||||
7: _decode_map,
|
|
||||||
8: _decode_packed_type(b'!i', 4, pad=True), # int32
|
|
||||||
9: _decode_uint, # uint64
|
|
||||||
10: _decode_uint, # uint128
|
|
||||||
11: _decode_array,
|
|
||||||
14: _decode_boolean,
|
|
||||||
15: _decode_packed_type(b'!f', 4), # float,
|
|
||||||
}
|
|
||||||
|
|
||||||
def decode(self, offset):
|
|
||||||
"""Decode a section of the data section starting at offset
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
offset -- the location of the data structure to decode
|
|
||||||
"""
|
|
||||||
new_offset = offset + 1
|
|
||||||
(ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset])
|
|
||||||
type_num = ctrl_byte >> 5
|
|
||||||
# Extended type
|
|
||||||
if not type_num:
|
|
||||||
(type_num, new_offset) = self._read_extended(new_offset)
|
|
||||||
|
|
||||||
if type_num not in self._type_decoder:
|
|
||||||
raise InvalidDatabaseError('Unexpected type number ({type}) '
|
|
||||||
'encountered'.format(type=type_num))
|
|
||||||
|
|
||||||
(size, new_offset) = self._size_from_ctrl_byte(
|
|
||||||
ctrl_byte, new_offset, type_num)
|
|
||||||
return self._type_decoder[type_num](self, size, new_offset)
|
|
||||||
|
|
||||||
def _read_extended(self, offset):
|
|
||||||
(next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1])
|
|
||||||
type_num = next_byte + 7
|
|
||||||
if type_num < 7:
|
|
||||||
raise InvalidDatabaseError(
|
|
||||||
'Something went horribly wrong in the decoder. An '
|
|
||||||
'extended type resolved to a type number < 8 '
|
|
||||||
'({type})'.format(type=type_num))
|
|
||||||
return type_num, offset + 1
|
|
||||||
|
|
||||||
def _verify_size(self, expected, actual):
|
|
||||||
if expected != actual:
|
|
||||||
raise InvalidDatabaseError(
|
|
||||||
'The MaxMind DB file\'s data section contains bad data '
|
|
||||||
'(unknown data type or corrupt data)'
|
|
||||||
)
|
|
||||||
|
|
||||||
def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num):
|
|
||||||
size = ctrl_byte & 0x1f
|
|
||||||
if type_num == 1:
|
|
||||||
return size, offset
|
|
||||||
bytes_to_read = 0 if size < 29 else size - 28
|
|
||||||
|
|
||||||
new_offset = offset + bytes_to_read
|
|
||||||
size_bytes = self._buffer[offset:new_offset]
|
|
||||||
|
|
||||||
# Using unpack rather than int_from_bytes as it is about 200 lookups
|
|
||||||
# per second faster here.
|
|
||||||
if size == 29:
|
|
||||||
size = 29 + struct.unpack(b'!B', size_bytes)[0]
|
|
||||||
elif size == 30:
|
|
||||||
size = 285 + struct.unpack(b'!H', size_bytes)[0]
|
|
||||||
elif size > 30:
|
|
||||||
size = struct.unpack(
|
|
||||||
b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821
|
|
||||||
|
|
||||||
return size, new_offset
|
|
|
@ -1,11 +0,0 @@
|
||||||
"""
|
|
||||||
maxminddb.errors
|
|
||||||
~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
This module contains custom errors for the MaxMind DB reader
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidDatabaseError(RuntimeError):
|
|
||||||
|
|
||||||
"""This error is thrown when unexpected data is found in the database."""
|
|
|
@ -1,570 +0,0 @@
|
||||||
#include <Python.h>
|
|
||||||
#include <maxminddb.h>
|
|
||||||
#include "structmember.h"
|
|
||||||
|
|
||||||
#define __STDC_FORMAT_MACROS
|
|
||||||
#include <inttypes.h>
|
|
||||||
|
|
||||||
static PyTypeObject Reader_Type;
|
|
||||||
static PyTypeObject Metadata_Type;
|
|
||||||
static PyObject *MaxMindDB_error;
|
|
||||||
|
|
||||||
typedef struct {
|
|
||||||
PyObject_HEAD /* no semicolon */
|
|
||||||
MMDB_s *mmdb;
|
|
||||||
} Reader_obj;
|
|
||||||
|
|
||||||
typedef struct {
|
|
||||||
PyObject_HEAD /* no semicolon */
|
|
||||||
PyObject *binary_format_major_version;
|
|
||||||
PyObject *binary_format_minor_version;
|
|
||||||
PyObject *build_epoch;
|
|
||||||
PyObject *database_type;
|
|
||||||
PyObject *description;
|
|
||||||
PyObject *ip_version;
|
|
||||||
PyObject *languages;
|
|
||||||
PyObject *node_count;
|
|
||||||
PyObject *record_size;
|
|
||||||
} Metadata_obj;
|
|
||||||
|
|
||||||
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list);
|
|
||||||
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list);
|
|
||||||
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list);
|
|
||||||
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list);
|
|
||||||
|
|
||||||
#if PY_MAJOR_VERSION >= 3
|
|
||||||
#define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void)
|
|
||||||
#define RETURN_MOD_INIT(m) return (m)
|
|
||||||
#define FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError
|
|
||||||
#else
|
|
||||||
#define MOD_INIT(name) PyMODINIT_FUNC init ## name(void)
|
|
||||||
#define RETURN_MOD_INIT(m) return
|
|
||||||
#define PyInt_FromLong PyLong_FromLong
|
|
||||||
#define FILE_NOT_FOUND_ERROR PyExc_IOError
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#ifdef __GNUC__
|
|
||||||
# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
|
|
||||||
#else
|
|
||||||
# define UNUSED(x) UNUSED_ ## x
|
|
||||||
#endif
|
|
||||||
|
|
||||||
static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds)
|
|
||||||
{
|
|
||||||
char *filename;
|
|
||||||
int mode = 0;
|
|
||||||
|
|
||||||
static char *kwlist[] = {"database", "mode", NULL};
|
|
||||||
if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) {
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (mode != 0 && mode != 1) {
|
|
||||||
PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). Only "
|
|
||||||
"MODE_AUTO and MODE_MMAP_EXT are supported by this extension.",
|
|
||||||
mode);
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (0 != access(filename, R_OK)) {
|
|
||||||
PyErr_Format(FILE_NOT_FOUND_ERROR,
|
|
||||||
"No such file or directory: '%s'",
|
|
||||||
filename);
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s));
|
|
||||||
if (NULL == mmdb) {
|
|
||||||
PyErr_NoMemory();
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
Reader_obj *mmdb_obj = (Reader_obj *)self;
|
|
||||||
if (!mmdb_obj) {
|
|
||||||
free(mmdb);
|
|
||||||
PyErr_NoMemory();
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb);
|
|
||||||
|
|
||||||
if (MMDB_SUCCESS != status) {
|
|
||||||
free(mmdb);
|
|
||||||
PyErr_Format(
|
|
||||||
MaxMindDB_error,
|
|
||||||
"Error opening database file (%s). Is this a valid MaxMind DB file?",
|
|
||||||
filename
|
|
||||||
);
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
mmdb_obj->mmdb = mmdb;
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Reader.get(ip_address) -> dict | None
 *
 * Look up `ip_address` (a string) in the open MaxMind DB and return the
 * decoded record as a Python object, or None when the address has no entry.
 * Raises ValueError for a malformed address, an IPv6 lookup in an IPv4-only
 * database, or a closed reader; raises InvalidDatabaseError for lower-level
 * libmaxminddb failures.
 */
static PyObject *Reader_get(PyObject *self, PyObject *args)
{
    char *ip_address = NULL;

    Reader_obj *mmdb_obj = (Reader_obj *)self;
    if (!PyArg_ParseTuple(args, "s", &ip_address)) {
        return NULL;
    }

    MMDB_s *mmdb = mmdb_obj->mmdb;

    /* Reader_close() sets mmdb to NULL; guard against use-after-close. */
    if (NULL == mmdb) {
        PyErr_SetString(PyExc_ValueError,
                        "Attempt to read from a closed MaxMind DB.");
        return NULL;
    }

    int gai_error = 0;
    int mmdb_error = MMDB_SUCCESS;
    MMDB_lookup_result_s result =
        MMDB_lookup_string(mmdb, ip_address, &gai_error,
                           &mmdb_error);

    /* gai_error is the getaddrinfo() status: the string did not parse. */
    if (0 != gai_error) {
        PyErr_Format(PyExc_ValueError,
                     "'%s' does not appear to be an IPv4 or IPv6 address.",
                     ip_address);
        return NULL;
    }

    if (MMDB_SUCCESS != mmdb_error) {
        /* IPv6-in-IPv4-database lookups are a caller error (ValueError);
         * anything else is reported as a database error. */
        PyObject *exception;
        if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) {
            exception = PyExc_ValueError;
        } else {
            exception = MaxMindDB_error;
        }
        PyErr_Format(exception, "Error looking up %s. %s",
                     ip_address, MMDB_strerror(mmdb_error));
        return NULL;
    }

    if (!result.found_entry) {
        Py_RETURN_NONE;
    }

    MMDB_entry_data_list_s *entry_data_list = NULL;
    int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list);
    if (MMDB_SUCCESS != status) {
        PyErr_Format(MaxMindDB_error,
                     "Error while looking up data for %s. %s",
                     ip_address, MMDB_strerror(status));
        MMDB_free_entry_data_list(entry_data_list);
        return NULL;
    }

    /* from_entry_data_list() advances the cursor as it decodes, so keep the
     * head of the list for freeing afterwards. */
    MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
    PyObject *py_obj = from_entry_data_list(&entry_data_list);
    MMDB_free_entry_data_list(original_entry_data_list);
    return py_obj;
}
|
|
||||||
|
|
||||||
/* Reader.metadata() -> Metadata
 *
 * Decode the database's metadata section and wrap it in a Metadata instance.
 * Raises IOError on a closed reader and InvalidDatabaseError when the
 * metadata cannot be decoded into a dict.
 *
 * Fixes vs. original: a non-dict decode result was leaked before returning,
 * and the temporary empty `args` tuple was never released.
 */
static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args))
{
    Reader_obj *mmdb_obj = (Reader_obj *)self;

    if (NULL == mmdb_obj->mmdb) {
        PyErr_SetString(PyExc_IOError,
                        "Attempt to read from a closed MaxMind DB.");
        return NULL;
    }

    MMDB_entry_data_list_s *entry_data_list;
    MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list);
    MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;

    PyObject *metadata_dict = from_entry_data_list(&entry_data_list);
    MMDB_free_entry_data_list(original_entry_data_list);
    if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) {
        /* Fix: release a non-dict result instead of leaking it. */
        Py_XDECREF(metadata_dict);
        PyErr_SetString(MaxMindDB_error,
                        "Error decoding metadata.");
        return NULL;
    }

    PyObject *args = PyTuple_New(0);
    if (NULL == args) {
        Py_DECREF(metadata_dict);
        return NULL;
    }

    /* Equivalent to Metadata(**metadata_dict). */
    PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args,
                                       metadata_dict);

    Py_DECREF(metadata_dict);
    Py_DECREF(args);  /* fix: the empty tuple was previously leaked */
    return metadata;
}
|
|
||||||
|
|
||||||
/* Reader.close() -> None
 *
 * Release the underlying MMDB handle. Safe to call more than once; the
 * handle pointer is cleared so later calls (and lookups) see a closed
 * reader.
 */
static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args))
{
    Reader_obj *reader = (Reader_obj *)self;
    MMDB_s *handle = reader->mmdb;

    if (handle != NULL) {
        reader->mmdb = NULL;
        MMDB_close(handle);
        free(handle);
    }

    Py_RETURN_NONE;
}
|
|
||||||
|
|
||||||
/* tp_dealloc for Reader: ensure the database handle is released before the
 * object memory is freed. */
static void Reader_dealloc(PyObject *self)
{
    Reader_obj *obj = (Reader_obj *)self;
    if (NULL != obj->mmdb) {
        /* Reader_close() closes/frees the MMDB_s and NULLs the pointer. */
        Reader_close(self, NULL);
    }

    PyObject_Del(self);
}
|
|
||||||
|
|
||||||
/* Metadata.__init__(**kwargs): store the nine metadata fields on the object.
 *
 * Fix vs. original: the nine PyObject pointers were uninitialized while the
 * parse format "|OOOOOOOOO" makes every argument optional, so constructing
 * Metadata() with a missing argument incremented the refcount of garbage
 * memory (undefined behavior). The pointers now start as NULL and a missing
 * argument raises TypeError instead.
 */
static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds)
{

    PyObject
    *binary_format_major_version = NULL,
    *binary_format_minor_version = NULL,
    *build_epoch = NULL,
    *database_type = NULL,
    *description = NULL,
    *ip_version = NULL,
    *languages = NULL,
    *node_count = NULL,
    *record_size = NULL;

    static char *kwlist[] = {
        "binary_format_major_version",
        "binary_format_minor_version",
        "build_epoch",
        "database_type",
        "description",
        "ip_version",
        "languages",
        "node_count",
        "record_size",
        NULL
    };

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist,
                                     &binary_format_major_version,
                                     &binary_format_minor_version,
                                     &build_epoch,
                                     &database_type,
                                     &description,
                                     &ip_version,
                                     &languages,
                                     &node_count,
                                     &record_size)) {
        return -1;
    }

    /* All nine fields are required; reject a partial construction rather
     * than storing NULLs (the Reader always supplies the full set). */
    if (NULL == binary_format_major_version ||
        NULL == binary_format_minor_version ||
        NULL == build_epoch ||
        NULL == database_type ||
        NULL == description ||
        NULL == ip_version ||
        NULL == languages ||
        NULL == node_count ||
        NULL == record_size) {
        PyErr_SetString(PyExc_TypeError,
                        "Metadata() requires all nine metadata arguments.");
        return -1;
    }

    Metadata_obj *obj = (Metadata_obj *)self;

    obj->binary_format_major_version = binary_format_major_version;
    obj->binary_format_minor_version = binary_format_minor_version;
    obj->build_epoch = build_epoch;
    obj->database_type = database_type;
    obj->description = description;
    obj->ip_version = ip_version;
    obj->languages = languages;
    obj->node_count = node_count;
    obj->record_size = record_size;

    Py_INCREF(obj->binary_format_major_version);
    Py_INCREF(obj->binary_format_minor_version);
    Py_INCREF(obj->build_epoch);
    Py_INCREF(obj->database_type);
    Py_INCREF(obj->description);
    Py_INCREF(obj->ip_version);
    Py_INCREF(obj->languages);
    Py_INCREF(obj->node_count);
    Py_INCREF(obj->record_size);

    return 0;
}
|
|
||||||
|
|
||||||
/* tp_dealloc for Metadata: drop the nine attribute references.
 *
 * Fix vs. original: use Py_XDECREF instead of Py_DECREF. tp_new
 * (PyType_GenericNew) zero-fills the struct, so if __init__ never ran or
 * failed part-way the slots are NULL and the unconditional Py_DECREF
 * crashed the interpreter.
 */
static void Metadata_dealloc(PyObject *self)
{
    Metadata_obj *obj = (Metadata_obj *)self;
    Py_XDECREF(obj->binary_format_major_version);
    Py_XDECREF(obj->binary_format_minor_version);
    Py_XDECREF(obj->build_epoch);
    Py_XDECREF(obj->database_type);
    Py_XDECREF(obj->description);
    Py_XDECREF(obj->ip_version);
    Py_XDECREF(obj->languages);
    Py_XDECREF(obj->node_count);
    Py_XDECREF(obj->record_size);
    PyObject_Del(self);
}
|
|
||||||
|
|
||||||
/* Convert the entry-data node at *entry_data_list into a Python object.
 *
 * Dispatches on the node type; maps and arrays recurse and consume multiple
 * nodes, advancing *entry_data_list past everything they decode. Returns a
 * new reference, or NULL with an exception set. A NULL list here indicates
 * a truncated/corrupt data section.
 */
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list)
{
    if (NULL == entry_data_list || NULL == *entry_data_list) {
        PyErr_SetString(
            MaxMindDB_error,
            "Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb."
            );
        return NULL;
    }

    switch ((*entry_data_list)->entry_data.type) {
    case MMDB_DATA_TYPE_MAP:
        return from_map(entry_data_list);
    case MMDB_DATA_TYPE_ARRAY:
        return from_array(entry_data_list);
    case MMDB_DATA_TYPE_UTF8_STRING:
        /* data_size is a byte length; the string is not NUL-terminated. */
        return PyUnicode_FromStringAndSize(
                   (*entry_data_list)->entry_data.utf8_string,
                   (*entry_data_list)->entry_data.data_size
                   );
    case MMDB_DATA_TYPE_BYTES:
        return PyByteArray_FromStringAndSize(
                   (const char *)(*entry_data_list)->entry_data.bytes,
                   (Py_ssize_t)(*entry_data_list)->entry_data.data_size);
    case MMDB_DATA_TYPE_DOUBLE:
        return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value);
    case MMDB_DATA_TYPE_FLOAT:
        return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value);
    case MMDB_DATA_TYPE_UINT16:
        return PyLong_FromLong( (*entry_data_list)->entry_data.uint16);
    case MMDB_DATA_TYPE_UINT32:
        return PyLong_FromLong((*entry_data_list)->entry_data.uint32);
    case MMDB_DATA_TYPE_BOOLEAN:
        return PyBool_FromLong((*entry_data_list)->entry_data.boolean);
    case MMDB_DATA_TYPE_UINT64:
        return PyLong_FromUnsignedLongLong(
                   (*entry_data_list)->entry_data.uint64);
    case MMDB_DATA_TYPE_UINT128:
        /* 128-bit values exceed C integer types; decoded via a hex string. */
        return from_uint128(*entry_data_list);
    case MMDB_DATA_TYPE_INT32:
        return PyLong_FromLong((*entry_data_list)->entry_data.int32);
    default:
        PyErr_Format(MaxMindDB_error,
                     "Invalid data type arguments: %d",
                     (*entry_data_list)->entry_data.type);
        return NULL;
    }
    /* Not reached: every switch arm returns. */
    return NULL;
}
|
|
||||||
|
|
||||||
/* Decode a MaxMind DB MAP entry into a new Python dict.
 *
 * On entry *entry_data_list points at the MAP node; on exit it points at the
 * last node consumed. Returns a new reference, or NULL with an exception set.
 *
 * Fixes vs. original: the loop condition tested `entry_data_list` (the
 * pointer-to-pointer, never NULL) instead of `*entry_data_list`, so a
 * truncated list caused a NULL dereference when reading the key node; the
 * key-creation and PyDict_SetItem results were also unchecked.
 */
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list)
{
    PyObject *py_obj = PyDict_New();
    if (NULL == py_obj) {
        PyErr_NoMemory();
        return NULL;
    }

    const uint32_t map_size = (*entry_data_list)->entry_data.data_size;

    uint32_t i;
    for (i = 0; i < map_size; i++) {
        /* Advance to the key node; a corrupt database may end the list
         * early, which the original code dereferenced blindly. */
        *entry_data_list = (*entry_data_list)->next;
        if (NULL == *entry_data_list) {
            Py_DECREF(py_obj);
            PyErr_SetString(
                MaxMindDB_error,
                "Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb."
                );
            return NULL;
        }

        PyObject *key = PyUnicode_FromStringAndSize(
            (char *)(*entry_data_list)->entry_data.utf8_string,
            (*entry_data_list)->entry_data.data_size
            );
        if (NULL == key) {
            Py_DECREF(py_obj);
            return NULL;
        }

        /* Advance to the value node; from_entry_data_list() raises if the
         * list ended prematurely, so no extra NULL check is needed here. */
        *entry_data_list = (*entry_data_list)->next;

        PyObject *value = from_entry_data_list(entry_data_list);
        if (NULL == value) {
            Py_DECREF(key);
            Py_DECREF(py_obj);
            return NULL;
        }

        int rc = PyDict_SetItem(py_obj, key, value);
        Py_DECREF(value);
        Py_DECREF(key);
        if (0 != rc) {
            Py_DECREF(py_obj);
            return NULL;
        }
    }

    return py_obj;
}
|
|
||||||
|
|
||||||
/* Decode a MaxMind DB ARRAY entry into a new Python list.
 *
 * On entry *entry_data_list points at the ARRAY node; on exit it points at
 * the last node consumed. Returns a new reference, or NULL with an
 * exception set.
 *
 * Fixes vs. original: the loop condition tested `entry_data_list` (the
 * pointer-to-pointer, never NULL), a dead check; from_entry_data_list()
 * already raises on a NULL *entry_data_list, so the loop now simply runs
 * the full element count and relies on that. Also replaces the
 * non-standard `uint` with `uint32_t`.
 */
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list)
{
    const uint32_t size = (*entry_data_list)->entry_data.data_size;

    PyObject *py_obj = PyList_New(size);
    if (NULL == py_obj) {
        PyErr_NoMemory();
        return NULL;
    }

    uint32_t i;
    for (i = 0; i < size; i++) {
        *entry_data_list = (*entry_data_list)->next;
        PyObject *value = from_entry_data_list(entry_data_list);
        if (NULL == value) {
            Py_DECREF(py_obj);
            return NULL;
        }
        /* PyList_SetItem steals the reference to value. */
        PyList_SetItem(py_obj, i, value);
    }
    return py_obj;
}
|
|
||||||
|
|
||||||
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list)
|
|
||||||
{
|
|
||||||
uint64_t high = 0;
|
|
||||||
uint64_t low = 0;
|
|
||||||
#if MMDB_UINT128_IS_BYTE_ARRAY
|
|
||||||
int i;
|
|
||||||
for (i = 0; i < 8; i++) {
|
|
||||||
high = (high << 8) | entry_data_list->entry_data.uint128[i];
|
|
||||||
}
|
|
||||||
|
|
||||||
for (i = 8; i < 16; i++) {
|
|
||||||
low = (low << 8) | entry_data_list->entry_data.uint128[i];
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
high = entry_data_list->entry_data.uint128 >> 64;
|
|
||||||
low = (uint64_t)entry_data_list->entry_data.uint128;
|
|
||||||
#endif
|
|
||||||
|
|
||||||
char *num_str = malloc(33);
|
|
||||||
if (NULL == num_str) {
|
|
||||||
PyErr_NoMemory();
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low);
|
|
||||||
|
|
||||||
PyObject *py_obj = PyLong_FromString(num_str, NULL, 16);
|
|
||||||
|
|
||||||
free(num_str);
|
|
||||||
return py_obj;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Methods exposed on maxminddb.extension.Reader instances. */
static PyMethodDef Reader_methods[] = {
    { "get", Reader_get, METH_VARARGS,
      "Get record for IP address" },
    { "metadata", Reader_metadata, METH_NOARGS,
      "Returns metadata object for database" },
    { "close", Reader_close, METH_NOARGS, "Closes database"},
    { NULL, NULL, 0, NULL }
};
|
|
||||||
|
|
||||||
/* Type object for the C Reader. tp_new is assigned in the module init
 * function (PyType_GenericNew). */
static PyTypeObject Reader_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_basicsize = sizeof(Reader_obj),
    .tp_dealloc = Reader_dealloc,
    .tp_doc = "Reader object",
    .tp_flags = Py_TPFLAGS_DEFAULT,
    .tp_methods = Reader_methods,
    .tp_name = "Reader",
    .tp_init = Reader_init,
};
|
|
||||||
|
|
||||||
/* Metadata has no methods; its data is exposed via Metadata_members. */
static PyMethodDef Metadata_methods[] = {
    { NULL, NULL, 0, NULL }
};
|
|
||||||
|
|
||||||
/* *INDENT-OFF* */
|
|
||||||
/* Read-only attribute table for Metadata, mirroring the nine fields stored
 * by Metadata_init. */
static PyMemberDef Metadata_members[] = {
    { "binary_format_major_version", T_OBJECT, offsetof(
          Metadata_obj, binary_format_major_version), READONLY, NULL },
    { "binary_format_minor_version", T_OBJECT, offsetof(
          Metadata_obj, binary_format_minor_version), READONLY, NULL },
    { "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch),
      READONLY, NULL },
    { "database_type", T_OBJECT, offsetof(Metadata_obj, database_type),
      READONLY, NULL },
    { "description", T_OBJECT, offsetof(Metadata_obj, description),
      READONLY, NULL },
    { "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version),
      READONLY, NULL },
    { "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY,
      NULL },
    { "node_count", T_OBJECT, offsetof(Metadata_obj, node_count),
      READONLY, NULL },
    { "record_size", T_OBJECT, offsetof(Metadata_obj, record_size),
      READONLY, NULL },
    { NULL, 0, 0, 0, NULL }
};
|
|
||||||
/* *INDENT-ON* */
|
|
||||||
|
|
||||||
/* Type object for Metadata. tp_new is assigned in the module init
 * function (PyType_GenericNew). */
static PyTypeObject Metadata_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_basicsize = sizeof(Metadata_obj),
    .tp_dealloc = Metadata_dealloc,
    .tp_doc = "Metadata object",
    .tp_flags = Py_TPFLAGS_DEFAULT,
    .tp_members = Metadata_members,
    .tp_methods = Metadata_methods,
    .tp_name = "Metadata",
    .tp_init = Metadata_init
};
|
|
||||||
|
|
||||||
/* The module itself exposes no functions, only the Reader/Metadata types. */
static PyMethodDef MaxMindDB_methods[] = {
    { NULL, NULL, 0, NULL }
};
|
|
||||||
|
|
||||||
|
|
||||||
#if PY_MAJOR_VERSION >= 3
|
|
||||||
static struct PyModuleDef MaxMindDB_module = {
|
|
||||||
PyModuleDef_HEAD_INIT,
|
|
||||||
.m_name = "extension",
|
|
||||||
.m_doc = "This is a C extension to read MaxMind DB file format",
|
|
||||||
.m_methods = MaxMindDB_methods,
|
|
||||||
};
|
|
||||||
#endif
|
|
||||||
|
|
||||||
MOD_INIT(extension){
|
|
||||||
PyObject *m;
|
|
||||||
|
|
||||||
#if PY_MAJOR_VERSION >= 3
|
|
||||||
m = PyModule_Create(&MaxMindDB_module);
|
|
||||||
#else
|
|
||||||
m = Py_InitModule("extension", MaxMindDB_methods);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
if (!m) {
|
|
||||||
RETURN_MOD_INIT(NULL);
|
|
||||||
}
|
|
||||||
|
|
||||||
Reader_Type.tp_new = PyType_GenericNew;
|
|
||||||
if (PyType_Ready(&Reader_Type)) {
|
|
||||||
RETURN_MOD_INIT(NULL);
|
|
||||||
}
|
|
||||||
Py_INCREF(&Reader_Type);
|
|
||||||
PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type);
|
|
||||||
|
|
||||||
Metadata_Type.tp_new = PyType_GenericNew;
|
|
||||||
if (PyType_Ready(&Metadata_Type)) {
|
|
||||||
RETURN_MOD_INIT(NULL);
|
|
||||||
}
|
|
||||||
PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type);
|
|
||||||
|
|
||||||
PyObject* error_mod = PyImport_ImportModule("maxminddb.errors");
|
|
||||||
if (error_mod == NULL) {
|
|
||||||
RETURN_MOD_INIT(NULL);
|
|
||||||
}
|
|
||||||
|
|
||||||
MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError");
|
|
||||||
Py_DECREF(error_mod);
|
|
||||||
|
|
||||||
if (MaxMindDB_error == NULL) {
|
|
||||||
RETURN_MOD_INIT(NULL);
|
|
||||||
}
|
|
||||||
|
|
||||||
Py_INCREF(MaxMindDB_error);
|
|
||||||
|
|
||||||
/* We primarily add it to the module for backwards compatibility */
|
|
||||||
PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error);
|
|
||||||
|
|
||||||
RETURN_MOD_INIT(m);
|
|
||||||
}
|
|
|
@ -1,66 +0,0 @@
|
||||||
"""For internal use only. It provides a slice-like file reader."""
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
try:
|
|
||||||
# pylint: disable=no-name-in-module
|
|
||||||
from multiprocessing import Lock
|
|
||||||
except ImportError:
|
|
||||||
from threading import Lock
|
|
||||||
|
|
||||||
|
|
||||||
class FileBuffer(object):

    """A slice-able file reader"""

    def __init__(self, database):
        self._handle = open(database, 'rb')
        self._size = os.fstat(self._handle.fileno()).st_size
        # Platforms without os.pread must serialize seek+read pairs.
        if not hasattr(os, 'pread'):
            self._lock = Lock()

    def __getitem__(self, key):
        if isinstance(key, slice):
            return self._read(key.stop - key.start, key.start)
        if isinstance(key, int):
            return self._read(1, key)
        raise TypeError("Invalid argument type.")

    def rfind(self, needle, start):
        """Reverse find needle from start"""
        found = self._read(self._size - start - 1, start).rfind(needle)
        return found if found == -1 else start + found

    def size(self):
        """Size of file"""
        return self._size

    def close(self):
        """Close file"""
        self._handle.close()

    if hasattr(os, 'pread'):

        def _read(self, buffersize, offset):
            """read that uses pread"""
            # pylint: disable=no-member
            return os.pread(self._handle.fileno(), buffersize, offset)

    else:

        def _read(self, buffersize, offset):
            """read with a lock

            This lock is necessary as after a fork, the different processes
            will share the same file table entry, even if we dup the fd, and
            as such the same offsets. There does not appear to be a way to
            duplicate the file table entry and we cannot re-open based on the
            original path as that file may have replaced with another or
            unlinked.
            """
            with self._lock:
                self._handle.seek(offset)
                return self._handle.read(buffersize)
|
|
|
@ -1,223 +0,0 @@
|
||||||
"""
|
|
||||||
maxminddb.reader
|
|
||||||
~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
This module contains the pure Python database reader and related classes.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
try:
|
|
||||||
import mmap
|
|
||||||
except ImportError:
|
|
||||||
# pylint: disable=invalid-name
|
|
||||||
mmap = None
|
|
||||||
|
|
||||||
import struct
|
|
||||||
|
|
||||||
from maxminddb.compat import byte_from_int, int_from_byte, compat_ip_address
|
|
||||||
from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY
|
|
||||||
from maxminddb.decoder import Decoder
|
|
||||||
from maxminddb.errors import InvalidDatabaseError
|
|
||||||
from maxminddb.file import FileBuffer
|
|
||||||
|
|
||||||
|
|
||||||
class Reader(object):

    """
    Instances of this class provide a reader for the MaxMind DB format. IP
    addresses can be looked up using the ``get`` method.
    """

    # Number of padding bytes between the search tree and the data section.
    _DATA_SECTION_SEPARATOR_SIZE = 16
    # Marker immediately preceding the metadata section near end of file.
    _METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com"

    # Cached node index where IPv4 lookups start in an IPv6 tree (see
    # _start_node); None until the first IPv4 lookup computes it.
    _ipv4_start = None

    def __init__(self, database, mode=MODE_AUTO):
        """Reader for the MaxMind DB file format

        Arguments:
        database -- A path to a valid MaxMind DB file such as a GeoIP2
                    database file.
        mode -- mode to open the database with. Valid mode are:
            * MODE_MMAP - read from memory map.
            * MODE_FILE - read database as standard file.
            * MODE_MEMORY - load database into memory.
            * MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default.
        """
        # pylint: disable=redefined-variable-type
        if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP:
            with open(database, 'rb') as db_file:
                self._buffer = mmap.mmap(
                    db_file.fileno(), 0, access=mmap.ACCESS_READ)
                self._buffer_size = self._buffer.size()
        elif mode in (MODE_AUTO, MODE_FILE):
            self._buffer = FileBuffer(database)
            self._buffer_size = self._buffer.size()
        elif mode == MODE_MEMORY:
            with open(database, 'rb') as db_file:
                self._buffer = db_file.read()
                self._buffer_size = len(self._buffer)
        else:
            raise ValueError('Unsupported open mode ({0}). Only MODE_AUTO, '
                             ' MODE_FILE, and MODE_MEMORY are support by the pure Python '
                             'Reader'.format(mode))

        # Only the final 128 KiB of the file is scanned for the marker.
        metadata_start = self._buffer.rfind(self._METADATA_START_MARKER,
                                            max(0, self._buffer_size
                                                - 128 * 1024))

        if metadata_start == -1:
            self.close()
            raise InvalidDatabaseError('Error opening database file ({0}). '
                                       'Is this a valid MaxMind DB file?'
                                       ''.format(database))

        metadata_start += len(self._METADATA_START_MARKER)
        metadata_decoder = Decoder(self._buffer, metadata_start)
        (metadata, _) = metadata_decoder.decode(metadata_start)
        self._metadata = Metadata(
            **metadata)  # pylint: disable=bad-option-value

        # The data section begins after the search tree plus a 16-byte
        # separator; the decoder resolves data pointers relative to it.
        self._decoder = Decoder(self._buffer, self._metadata.search_tree_size
                                + self._DATA_SECTION_SEPARATOR_SIZE)

    def metadata(self):
        """Return the metadata associated with the MaxMind DB file"""
        return self._metadata

    def get(self, ip_address):
        """Return the record for the ip_address in the MaxMind DB


        Arguments:
        ip_address -- an IP address in the standard string notation
        """

        address = compat_ip_address(ip_address)

        if address.version == 6 and self._metadata.ip_version == 4:
            raise ValueError('Error looking up {0}. You attempted to look up '
                             'an IPv6 address in an IPv4-only database.'.format(
                                 ip_address))
        pointer = self._find_address_in_tree(address)

        # pointer == 0 means the tree had no record for this address.
        return self._resolve_data_pointer(pointer) if pointer else None

    def _read_node(self, node_number, index):
        # Each node packs two records; decode record `index` (0 or 1)
        # according to the database's record size (24, 28, or 32 bits) and
        # return it as an unsigned 32-bit integer.
        base_offset = node_number * self._metadata.node_byte_size

        record_size = self._metadata.record_size
        if record_size == 24:
            offset = base_offset + index * 3
            node_bytes = b'\x00' + self._buffer[offset:offset + 3]
        elif record_size == 28:
            # The shared middle byte carries the high nibble of each record.
            (middle,) = struct.unpack(
                b'!B', self._buffer[base_offset + 3:base_offset + 4])
            if index:
                middle &= 0x0F
            else:
                middle = (0xF0 & middle) >> 4
            offset = base_offset + index * 4
            node_bytes = byte_from_int(
                middle) + self._buffer[offset:offset + 3]
        elif record_size == 32:
            offset = base_offset + index * 4
            node_bytes = self._buffer[offset:offset + 4]
        else:
            raise InvalidDatabaseError(
                'Unknown record size: {0}'.format(record_size))
        return struct.unpack(b'!I', node_bytes)[0]

    def _find_address_in_tree(self, ip_address):
        # Walk the binary search tree one address bit at a time,
        # most-significant bit first.
        packed = ip_address.packed

        bit_count = len(packed) * 8
        node = self._start_node(bit_count)

        for i in range(bit_count):
            if node >= self._metadata.node_count:
                break
            # Extract bit i of the packed address (MSB-first within a byte).
            bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8))
            node = self._read_node(node, bit)
        if node == self._metadata.node_count:
            # Record is empty
            return 0
        elif node > self._metadata.node_count:
            # Values above node_count point into the data section.
            return node

        raise InvalidDatabaseError('Invalid node in search tree')

    def _start_node(self, length):
        if self._metadata.ip_version != 6 or length == 128:
            return 0

        # We are looking up an IPv4 address in an IPv6 tree. Skip over the
        # first 96 nodes.
        if self._ipv4_start:
            return self._ipv4_start

        node = 0
        for _ in range(96):
            if node >= self._metadata.node_count:
                break
            node = self._read_node(node, 0)
        # Cache the result so subsequent IPv4 lookups skip the walk.
        self._ipv4_start = node
        return node

    def _resolve_data_pointer(self, pointer):
        # Translate a search-tree record value into an absolute offset in
        # the data section and decode the record found there.
        resolved = pointer - self._metadata.node_count + \
            self._metadata.search_tree_size

        if resolved > self._buffer_size:
            raise InvalidDatabaseError(
                "The MaxMind DB file's search tree is corrupt")

        (data, _) = self._decoder.decode(resolved)
        return data

    def close(self):
        """Closes the MaxMind DB file and returns the resources to the system"""
        # pylint: disable=unidiomatic-typecheck
        # A MODE_MEMORY buffer is a plain bytes/str object with no close().
        if type(self._buffer) not in (str, bytes):
            self._buffer.close()
|
||||||
class Metadata(object):

    """Metadata for the MaxMind DB reader"""

    # pylint: disable=too-many-instance-attributes
    def __init__(self, **kwargs):
        """Creates new Metadata object. kwargs are key/value pairs from spec"""
        # Explicit attribute assignments (rather than updating __dict__)
        # keep static analysis tools and IDEs aware of the fields.
        self.node_count = kwargs['node_count']
        self.record_size = kwargs['record_size']
        self.ip_version = kwargs['ip_version']
        self.database_type = kwargs['database_type']
        self.languages = kwargs['languages']
        self.binary_format_major_version = kwargs[
            'binary_format_major_version']
        self.binary_format_minor_version = kwargs[
            'binary_format_minor_version']
        self.build_epoch = kwargs['build_epoch']
        self.description = kwargs['description']

    @property
    def node_byte_size(self):
        """The size of a node in bytes"""
        # record_size is in bits and each node holds two records.
        return self.record_size // 4

    @property
    def search_tree_size(self):
        """The size of the search tree"""
        return self.node_count * self.node_byte_size

    def __repr__(self):
        pairs = ', '.join(
            '{0}={1!r}'.format(key, value)
            for key, value in self.__dict__.items())
        return '%s.%s(%s)' % (
            self.__module__, self.__class__.__name__, pairs)
|
|
|
@ -472,7 +472,7 @@ def initialize_scheduler():
|
||||||
pms_update_check_hours = CONFIG.PMS_UPDATE_CHECK_INTERVAL if 1 <= CONFIG.PMS_UPDATE_CHECK_INTERVAL else 24
|
pms_update_check_hours = CONFIG.PMS_UPDATE_CHECK_INTERVAL if 1 <= CONFIG.PMS_UPDATE_CHECK_INTERVAL else 24
|
||||||
|
|
||||||
schedule_job(versioncheck.check_update, 'Check GitHub for updates',
|
schedule_job(versioncheck.check_update, 'Check GitHub for updates',
|
||||||
hours=0, minutes=github_minutes, seconds=0, args=(bool(CONFIG.PLEXPY_AUTO_UPDATE), True))
|
hours=0, minutes=github_minutes, seconds=0, args=(True, True))
|
||||||
|
|
||||||
backup_hours = CONFIG.BACKUP_INTERVAL if 1 <= CONFIG.BACKUP_INTERVAL <= 24 else 6
|
backup_hours = CONFIG.BACKUP_INTERVAL if 1 <= CONFIG.BACKUP_INTERVAL <= 24 else 6
|
||||||
|
|
||||||
|
@ -480,15 +480,15 @@ def initialize_scheduler():
|
||||||
hours=backup_hours, minutes=0, seconds=0, args=(True, True))
|
hours=backup_hours, minutes=0, seconds=0, args=(True, True))
|
||||||
schedule_job(config.make_backup, 'Backup Tautulli config',
|
schedule_job(config.make_backup, 'Backup Tautulli config',
|
||||||
hours=backup_hours, minutes=0, seconds=0, args=(True, True))
|
hours=backup_hours, minutes=0, seconds=0, args=(True, True))
|
||||||
schedule_job(helpers.update_geoip_db, 'Update GeoLite2 database',
|
|
||||||
hours=12 * bool(CONFIG.GEOIP_DB_INSTALLED), minutes=0, seconds=0)
|
|
||||||
|
|
||||||
if WS_CONNECTED and CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
|
if WS_CONNECTED and CONFIG.PMS_IP and CONFIG.PMS_TOKEN:
|
||||||
schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
|
schedule_job(plextv.get_server_resources, 'Refresh Plex server URLs',
|
||||||
hours=12 * (not bool(CONFIG.PMS_URL_MANUAL)), minutes=0, seconds=0)
|
hours=12 * (not bool(CONFIG.PMS_URL_MANUAL)), minutes=0, seconds=0)
|
||||||
|
|
||||||
|
pms_remote_access_seconds = CONFIG.REMOTE_ACCESS_PING_INTERVAL if 60 <= CONFIG.REMOTE_ACCESS_PING_INTERVAL else 60
|
||||||
|
|
||||||
schedule_job(activity_pinger.check_server_access, 'Check for Plex remote access',
|
schedule_job(activity_pinger.check_server_access, 'Check for Plex remote access',
|
||||||
hours=0, minutes=0, seconds=60 * bool(CONFIG.MONITOR_REMOTE_ACCESS))
|
hours=0, minutes=0, seconds=pms_remote_access_seconds * bool(CONFIG.MONITOR_REMOTE_ACCESS))
|
||||||
schedule_job(activity_pinger.check_server_updates, 'Check for Plex updates',
|
schedule_job(activity_pinger.check_server_updates, 'Check for Plex updates',
|
||||||
hours=pms_update_check_hours * bool(CONFIG.MONITOR_PMS_UPDATES), minutes=0, seconds=0)
|
hours=pms_update_check_hours * bool(CONFIG.MONITOR_PMS_UPDATES), minutes=0, seconds=0)
|
||||||
|
|
||||||
|
@ -612,8 +612,8 @@ def dbcheck():
|
||||||
'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, session_key INTEGER, session_id TEXT, '
|
'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, session_key INTEGER, session_id TEXT, '
|
||||||
'transcode_key TEXT, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, stopped INTEGER, '
|
'transcode_key TEXT, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, stopped INTEGER, '
|
||||||
'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, '
|
'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, '
|
||||||
'ip_address TEXT, machine_id TEXT, player TEXT, product TEXT, platform TEXT, title TEXT, parent_title TEXT, '
|
'ip_address TEXT, machine_id TEXT, bandwidth INTEGER, location TEXT, player TEXT, product TEXT, platform TEXT, '
|
||||||
'grandparent_title TEXT, original_title TEXT, full_title TEXT, '
|
'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, '
|
||||||
'media_index INTEGER, parent_media_index INTEGER, '
|
'media_index INTEGER, parent_media_index INTEGER, '
|
||||||
'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, year INTEGER, '
|
'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, year INTEGER, '
|
||||||
'parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
|
'parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
|
||||||
|
@ -640,7 +640,13 @@ def dbcheck():
|
||||||
'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, '
|
'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, '
|
||||||
'secure INTEGER, relayed INTEGER, '
|
'secure INTEGER, relayed INTEGER, '
|
||||||
'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, '
|
'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, '
|
||||||
'write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT)'
|
'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT)'
|
||||||
|
)
|
||||||
|
|
||||||
|
# sessions_continued table :: This is a temp table that keeps track of continued streaming sessions
|
||||||
|
c_db.execute(
|
||||||
|
'CREATE TABLE IF NOT EXISTS sessions_continued (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
||||||
|
'user_id INTEGER, machine_id TEXT, media_type TEXT, stopped INTEGER)'
|
||||||
)
|
)
|
||||||
|
|
||||||
# session_history table :: This is a history table which logs essential stream details
|
# session_history table :: This is a history table which logs essential stream details
|
||||||
|
@ -1294,6 +1300,27 @@ def dbcheck():
|
||||||
'ALTER TABLE sessions ADD COLUMN guid TEXT'
|
'ALTER TABLE sessions ADD COLUMN guid TEXT'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Upgrade sessions table from earlier versions
|
||||||
|
try:
|
||||||
|
c_db.execute('SELECT bandwidth FROM sessions')
|
||||||
|
except sqlite3.OperationalError:
|
||||||
|
logger.debug(u"Altering database. Updating database table sessions.")
|
||||||
|
c_db.execute(
|
||||||
|
'ALTER TABLE sessions ADD COLUMN bandwidth INTEGER'
|
||||||
|
)
|
||||||
|
c_db.execute(
|
||||||
|
'ALTER TABLE sessions ADD COLUMN location TEXT'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Upgrade sessions table from earlier versions
|
||||||
|
try:
|
||||||
|
c_db.execute('SELECT initial_stream FROM sessions')
|
||||||
|
except sqlite3.OperationalError:
|
||||||
|
logger.debug(u"Altering database. Updating database table sessions.")
|
||||||
|
c_db.execute(
|
||||||
|
'ALTER TABLE sessions ADD COLUMN initial_stream INTEGER DEFAULT 1'
|
||||||
|
)
|
||||||
|
|
||||||
# Upgrade session_history table from earlier versions
|
# Upgrade session_history table from earlier versions
|
||||||
try:
|
try:
|
||||||
c_db.execute('SELECT reference_id FROM session_history')
|
c_db.execute('SELECT reference_id FROM session_history')
|
||||||
|
|
|
@ -96,14 +96,14 @@ class ActivityHandler(object):
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def update_db_session(self, session=None):
|
def update_db_session(self, session=None, notify=False):
|
||||||
if session is None:
|
if session is None:
|
||||||
session = self.get_live_session()
|
session = self.get_live_session()
|
||||||
|
|
||||||
if session:
|
if session:
|
||||||
# Update our session temp table values
|
# Update our session temp table values
|
||||||
ap = activity_processor.ActivityProcessor()
|
ap = activity_processor.ActivityProcessor()
|
||||||
ap.write_session(session=session, notify=False)
|
ap.write_session(session=session, notify=notify)
|
||||||
|
|
||||||
self.set_session_state()
|
self.set_session_state()
|
||||||
|
|
||||||
|
@ -133,10 +133,11 @@ class ActivityHandler(object):
|
||||||
% (str(session['session_key']), str(session['user_id']), session['username'],
|
% (str(session['session_key']), str(session['user_id']), session['username'],
|
||||||
str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
|
str(session['rating_key']), session['full_title'], '[Live TV]' if session['live'] else ''))
|
||||||
|
|
||||||
plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
|
# Send notification after updating db
|
||||||
|
#plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
|
||||||
|
|
||||||
# Write the new session to our temp session table
|
# Write the new session to our temp session table
|
||||||
self.update_db_session(session=session)
|
self.update_db_session(session=session, notify=True)
|
||||||
|
|
||||||
# Schedule a callback to force stop a stale stream 5 minutes later
|
# Schedule a callback to force stop a stale stream 5 minutes later
|
||||||
schedule_callback('session_key-{}'.format(self.get_session_key()),
|
schedule_callback('session_key-{}'.format(self.get_session_key()),
|
||||||
|
|
|
@ -17,7 +17,6 @@ from __future__ import unicode_literals
|
||||||
from future.builtins import str
|
from future.builtins import str
|
||||||
|
|
||||||
import threading
|
import threading
|
||||||
import time
|
|
||||||
|
|
||||||
import plexpy
|
import plexpy
|
||||||
if plexpy.PYTHON2:
|
if plexpy.PYTHON2:
|
||||||
|
@ -327,31 +326,27 @@ def check_server_access():
|
||||||
|
|
||||||
# Check for remote access
|
# Check for remote access
|
||||||
if server_response:
|
if server_response:
|
||||||
|
if server_response['reason']:
|
||||||
mapping_state = server_response['mapping_state']
|
|
||||||
mapping_error = server_response['mapping_error']
|
|
||||||
|
|
||||||
# Check if the port is mapped
|
|
||||||
if not mapping_state == 'mapped':
|
|
||||||
ext_ping_count += 1
|
ext_ping_count += 1
|
||||||
logger.warn("Tautulli Monitor :: Plex remote access port not mapped, ping attempt %s." \
|
logger.warn("Tautulli Monitor :: Remote access failed: %s, ping attempt %s." \
|
||||||
% str(ext_ping_count))
|
% (server_response['reason'], str(ext_ping_count)))
|
||||||
# Check if the port is open
|
|
||||||
elif mapping_error == 'unreachable':
|
# Waiting for port mapping
|
||||||
ext_ping_count += 1
|
elif server_response['mapping_state'] == 'waiting':
|
||||||
logger.warn("Tautulli Monitor :: Plex remote access port mapped, but mapping failed, ping attempt %s." \
|
logger.warn("Tautulli Monitor :: Remote access waiting for port mapping, ping attempt %s." \
|
||||||
% str(ext_ping_count))
|
% str(ext_ping_count))
|
||||||
|
|
||||||
# Reset external ping counter
|
# Reset external ping counter
|
||||||
else:
|
else:
|
||||||
if ext_ping_count >= plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
|
if ext_ping_count >= plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
|
||||||
logger.info("Tautulli Monitor :: Plex remote access is back up.")
|
logger.info("Tautulli Monitor :: Plex remote access is back up.")
|
||||||
|
|
||||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup'})
|
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup', 'remote_access_info': server_response})
|
||||||
|
|
||||||
ext_ping_count = 0
|
ext_ping_count = 0
|
||||||
|
|
||||||
if ext_ping_count == plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
|
if ext_ping_count == plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
|
||||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown'})
|
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extdown', 'remote_access_info': server_response})
|
||||||
|
|
||||||
|
|
||||||
def check_server_updates():
|
def check_server_updates():
|
||||||
|
|
|
@ -19,7 +19,6 @@ from future.builtins import object
|
||||||
|
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
import json
|
import json
|
||||||
import time
|
|
||||||
|
|
||||||
import plexpy
|
import plexpy
|
||||||
if plexpy.PYTHON2:
|
if plexpy.PYTHON2:
|
||||||
|
@ -68,6 +67,8 @@ class ActivityProcessor(object):
|
||||||
'year': session.get('year', ''),
|
'year': session.get('year', ''),
|
||||||
'friendly_name': session.get('friendly_name', ''),
|
'friendly_name': session.get('friendly_name', ''),
|
||||||
'ip_address': session.get('ip_address', ''),
|
'ip_address': session.get('ip_address', ''),
|
||||||
|
'bandwidth': session.get('bandwidth', 0),
|
||||||
|
'location': session.get('location', ''),
|
||||||
'player': session.get('player', ''),
|
'player': session.get('player', ''),
|
||||||
'product': session.get('product', ''),
|
'product': session.get('product', ''),
|
||||||
'platform': session.get('platform', ''),
|
'platform': session.get('platform', ''),
|
||||||
|
@ -152,15 +153,20 @@ class ActivityProcessor(object):
|
||||||
result = self.db.upsert('sessions', values, keys)
|
result = self.db.upsert('sessions', values, keys)
|
||||||
|
|
||||||
if result == 'insert':
|
if result == 'insert':
|
||||||
# Check if any notification agents have notifications enabled
|
|
||||||
if notify:
|
|
||||||
plexpy.NOTIFY_QUEUE.put({'stream_data': values.copy(), 'notify_action': 'on_play'})
|
|
||||||
|
|
||||||
# If it's our first write then time stamp it.
|
# If it's our first write then time stamp it.
|
||||||
started = helpers.timestamp()
|
started = helpers.timestamp()
|
||||||
timestamp = {'started': started}
|
initial_stream = self.is_initial_stream(user_id=values['user_id'],
|
||||||
|
machine_id=values['machine_id'],
|
||||||
|
media_type=values['media_type'],
|
||||||
|
started=started)
|
||||||
|
timestamp = {'started': started, 'initial_stream': initial_stream}
|
||||||
self.db.upsert('sessions', timestamp, keys)
|
self.db.upsert('sessions', timestamp, keys)
|
||||||
|
|
||||||
|
# Check if any notification agents have notifications enabled
|
||||||
|
if notify:
|
||||||
|
session.update(timestamp)
|
||||||
|
plexpy.NOTIFY_QUEUE.put({'stream_data': session.copy(), 'notify_action': 'on_play'})
|
||||||
|
|
||||||
# Add Live TV library if it hasn't been added
|
# Add Live TV library if it hasn't been added
|
||||||
if values['live']:
|
if values['live']:
|
||||||
libraries.add_live_tv_library()
|
libraries.add_live_tv_library()
|
||||||
|
@ -209,6 +215,12 @@ class ActivityProcessor(object):
|
||||||
state='stopped',
|
state='stopped',
|
||||||
stopped=stopped)
|
stopped=stopped)
|
||||||
|
|
||||||
|
if not is_import:
|
||||||
|
self.write_continued_session(user_id=session['user_id'],
|
||||||
|
machine_id=session['machine_id'],
|
||||||
|
media_type=session['media_type'],
|
||||||
|
stopped=stopped)
|
||||||
|
|
||||||
if str(session['rating_key']).isdigit() and session['media_type'] in ('movie', 'episode', 'track'):
|
if str(session['rating_key']).isdigit() and session['media_type'] in ('movie', 'episode', 'track'):
|
||||||
logging_enabled = True
|
logging_enabled = True
|
||||||
else:
|
else:
|
||||||
|
@ -637,3 +649,16 @@ class ActivityProcessor(object):
|
||||||
self.db.action('UPDATE sessions SET watched = ?'
|
self.db.action('UPDATE sessions SET watched = ?'
|
||||||
'WHERE session_key = ?',
|
'WHERE session_key = ?',
|
||||||
[1, session_key])
|
[1, session_key])
|
||||||
|
|
||||||
|
def write_continued_session(self, user_id=None, machine_id=None, media_type=None, stopped=None):
|
||||||
|
keys = {'user_id': user_id, 'machine_id': machine_id, 'media_type': media_type}
|
||||||
|
values = {'stopped': stopped}
|
||||||
|
self.db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values)
|
||||||
|
|
||||||
|
def is_initial_stream(self, user_id=None, machine_id=None, media_type=None, started=None):
|
||||||
|
last_session = self.db.select_single('SELECT stopped '
|
||||||
|
'FROM sessions_continued '
|
||||||
|
'WHERE user_id = ? AND machine_id = ? AND media_type = ? '
|
||||||
|
'ORDER BY stopped DESC',
|
||||||
|
[user_id, machine_id, media_type])
|
||||||
|
return int(started - last_session.get('stopped', 0) >= plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD)
|
||||||
|
|
|
@ -631,6 +631,12 @@ General optional parameters:
|
||||||
cherrypy.response.headers['Content-Type'] = 'image/jpeg'
|
cherrypy.response.headers['Content-Type'] = 'image/jpeg'
|
||||||
return out['response']['data']
|
return out['response']['data']
|
||||||
|
|
||||||
|
elif self._api_cmd == 'get_geoip_lookup':
|
||||||
|
# Remove nested data and put error message inside data for backwards compatibility
|
||||||
|
out['response']['data'] = out['response']['data'].get('data')
|
||||||
|
if not out['response']['data']:
|
||||||
|
out['response']['data'] = {'error': out['response']['message']}
|
||||||
|
|
||||||
if self._api_out_type == 'json':
|
if self._api_out_type == 'json':
|
||||||
cherrypy.response.headers['Content-Type'] = 'application/json;charset=UTF-8'
|
cherrypy.response.headers['Content-Type'] = 'application/json;charset=UTF-8'
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -224,8 +224,7 @@ SCHEDULER_LIST = [
|
||||||
'Refresh libraries list',
|
'Refresh libraries list',
|
||||||
'Refresh Plex server URLs',
|
'Refresh Plex server URLs',
|
||||||
'Backup Tautulli database',
|
'Backup Tautulli database',
|
||||||
'Backup Tautulli config',
|
'Backup Tautulli config'
|
||||||
'Update GeoLite2 database'
|
|
||||||
]
|
]
|
||||||
|
|
||||||
DATE_TIME_FORMATS = [
|
DATE_TIME_FORMATS = [
|
||||||
|
@ -350,10 +349,13 @@ NOTIFICATION_PARAMETERS = [
|
||||||
{
|
{
|
||||||
'category': 'Stream Details',
|
'category': 'Stream Details',
|
||||||
'parameters': [
|
'parameters': [
|
||||||
{'name': 'Streams', 'type': 'int', 'value': 'streams', 'description': 'The number of concurrent streams.'},
|
{'name': 'Streams', 'type': 'int', 'value': 'streams', 'description': 'The total number of concurrent streams.'},
|
||||||
{'name': 'Direct Plays', 'type': 'int', 'value': 'direct_plays', 'description': 'The number of concurrent direct plays.'},
|
{'name': 'Direct Plays', 'type': 'int', 'value': 'direct_plays', 'description': 'The total number of concurrent direct plays.'},
|
||||||
{'name': 'Direct Streams', 'type': 'int', 'value': 'direct_streams', 'description': 'The number of concurrent direct streams.'},
|
{'name': 'Direct Streams', 'type': 'int', 'value': 'direct_streams', 'description': 'The total number of concurrent direct streams.'},
|
||||||
{'name': 'Transcodes', 'type': 'int', 'value': 'transcodes', 'description': 'The number of concurrent transcodes.'},
|
{'name': 'Transcodes', 'type': 'int', 'value': 'transcodes', 'description': 'The total number of concurrent transcodes.'},
|
||||||
|
{'name': 'Total Bandwidth', 'type': 'int', 'value': 'total_bandwidth', 'description': 'The total Plex Streaming Brain reserved bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
|
||||||
|
{'name': 'LAN Bandwidth', 'type': 'int', 'value': 'lan_bandwidth', 'description': 'The total Plex Streaming Brain reserved LAN bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
|
||||||
|
{'name': 'WAN Bandwidth', 'type': 'int', 'value': 'wan_bandwidth', 'description': 'The total Plex Streaming Brain reserved WAN bandwidth (in kbps).', 'help_text': 'not the used bandwidth'},
|
||||||
{'name': 'User Streams', 'type': 'int', 'value': 'user_streams', 'description': 'The number of concurrent streams by the user streaming.'},
|
{'name': 'User Streams', 'type': 'int', 'value': 'user_streams', 'description': 'The number of concurrent streams by the user streaming.'},
|
||||||
{'name': 'User Direct Plays', 'type': 'int', 'value': 'user_direct_plays', 'description': 'The number of concurrent direct plays by the user streaming.'},
|
{'name': 'User Direct Plays', 'type': 'int', 'value': 'user_direct_plays', 'description': 'The number of concurrent direct plays by the user streaming.'},
|
||||||
{'name': 'User Direct Streams', 'type': 'int', 'value': 'user_direct_streams', 'description': 'The number of concurrent direct streams by the user streaming.'},
|
{'name': 'User Direct Streams', 'type': 'int', 'value': 'user_direct_streams', 'description': 'The number of concurrent direct streams by the user streaming.'},
|
||||||
|
@ -361,10 +363,12 @@ NOTIFICATION_PARAMETERS = [
|
||||||
{'name': 'User', 'type': 'str', 'value': 'user', 'description': 'The friendly name of the user streaming.'},
|
{'name': 'User', 'type': 'str', 'value': 'user', 'description': 'The friendly name of the user streaming.'},
|
||||||
{'name': 'Username', 'type': 'str', 'value': 'username', 'description': 'The username of the user streaming.'},
|
{'name': 'Username', 'type': 'str', 'value': 'username', 'description': 'The username of the user streaming.'},
|
||||||
{'name': 'User Email', 'type': 'str', 'value': 'user_email', 'description': 'The email address of the user streaming.'},
|
{'name': 'User Email', 'type': 'str', 'value': 'user_email', 'description': 'The email address of the user streaming.'},
|
||||||
|
{'name': 'User Thumb', 'type': 'str', 'value': 'user_thumb', 'description': 'The profile picture URL of the user streaming.'},
|
||||||
{'name': 'Device', 'type': 'str', 'value': 'device', 'description': 'The type of client device being used for playback.'},
|
{'name': 'Device', 'type': 'str', 'value': 'device', 'description': 'The type of client device being used for playback.'},
|
||||||
{'name': 'Platform', 'type': 'str', 'value': 'platform', 'description': 'The type of client platform being used for playback.'},
|
{'name': 'Platform', 'type': 'str', 'value': 'platform', 'description': 'The type of client platform being used for playback.'},
|
||||||
{'name': 'Product', 'type': 'str', 'value': 'product', 'description': 'The type of client product being used for playback.'},
|
{'name': 'Product', 'type': 'str', 'value': 'product', 'description': 'The type of client product being used for playback.'},
|
||||||
{'name': 'Player', 'type': 'str', 'value': 'player', 'description': 'The name of the player being used for playback.'},
|
{'name': 'Player', 'type': 'str', 'value': 'player', 'description': 'The name of the player being used for playback.'},
|
||||||
|
{'name': 'Initial Stream', 'type': 'int', 'value': 'initial_stream', 'description': 'If the stream is the initial stream of a continuous streaming session.', 'example': '0 or 1'},
|
||||||
{'name': 'IP Address', 'type': 'str', 'value': 'ip_address', 'description': 'The IP address of the device being used for playback.'},
|
{'name': 'IP Address', 'type': 'str', 'value': 'ip_address', 'description': 'The IP address of the device being used for playback.'},
|
||||||
{'name': 'Stream Duration', 'type': 'int', 'value': 'stream_duration', 'description': 'The duration (in minutes) for the stream.'},
|
{'name': 'Stream Duration', 'type': 'int', 'value': 'stream_duration', 'description': 'The duration (in minutes) for the stream.'},
|
||||||
{'name': 'Stream Time', 'type': 'str', 'value': 'stream_time', 'description': 'The duration (in time format) of the stream.'},
|
{'name': 'Stream Time', 'type': 'str', 'value': 'stream_time', 'description': 'The duration (in time format) of the stream.'},
|
||||||
|
@ -389,7 +393,7 @@ NOTIFICATION_PARAMETERS = [
|
||||||
{'name': 'Relayed', 'type': 'int', 'value': 'relayed', 'description': 'If the stream is using Plex Relay.', 'example': '0 or 1'},
|
{'name': 'Relayed', 'type': 'int', 'value': 'relayed', 'description': 'If the stream is using Plex Relay.', 'example': '0 or 1'},
|
||||||
{'name': 'Stream Local', 'type': 'int', 'value': 'stream_local', 'description': 'If the stream is local.', 'example': '0 or 1'},
|
{'name': 'Stream Local', 'type': 'int', 'value': 'stream_local', 'description': 'If the stream is local.', 'example': '0 or 1'},
|
||||||
{'name': 'Stream Location', 'type': 'str', 'value': 'stream_location', 'description': 'The network location of the stream.', 'example': 'lan or wan'},
|
{'name': 'Stream Location', 'type': 'str', 'value': 'stream_location', 'description': 'The network location of the stream.', 'example': 'lan or wan'},
|
||||||
{'name': 'Stream Bandwidth', 'type': 'int', 'value': 'stream_bandwidth', 'description': 'The required bandwidth (in kbps) of the stream.', 'help_text': 'not the used bandwidth'},
|
{'name': 'Stream Bandwidth', 'type': 'int', 'value': 'stream_bandwidth', 'description': 'The Plex Streaming Brain reserved bandwidth (in kbps) of the stream.', 'help_text': 'not the used bandwidth'},
|
||||||
{'name': 'Stream Container', 'type': 'str', 'value': 'stream_container', 'description': 'The media container of the stream.'},
|
{'name': 'Stream Container', 'type': 'str', 'value': 'stream_container', 'description': 'The media container of the stream.'},
|
||||||
{'name': 'Stream Bitrate', 'type': 'int', 'value': 'stream_bitrate', 'description': 'The bitrate (in kbps) of the stream.'},
|
{'name': 'Stream Bitrate', 'type': 'int', 'value': 'stream_bitrate', 'description': 'The bitrate (in kbps) of the stream.'},
|
||||||
{'name': 'Stream Aspect Ratio', 'type': 'float', 'value': 'stream_aspect_ratio', 'description': 'The aspect ratio of the stream.'},
|
{'name': 'Stream Aspect Ratio', 'type': 'float', 'value': 'stream_aspect_ratio', 'description': 'The aspect ratio of the stream.'},
|
||||||
|
@ -556,6 +560,18 @@ NOTIFICATION_PARAMETERS = [
|
||||||
{'name': 'Indexes', 'type': 'int', 'value': 'indexes', 'description': 'If the media has video preview thumbnails.', 'example': '0 or 1'},
|
{'name': 'Indexes', 'type': 'int', 'value': 'indexes', 'description': 'If the media has video preview thumbnails.', 'example': '0 or 1'},
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
'category': 'Plex Remote Access',
|
||||||
|
'parameters': [
|
||||||
|
{'name': 'Remote Access Mapping State', 'type': 'str', 'value': 'remote_access_mapping_state', 'description': 'The mapping state of the Plex remote access port.'},
|
||||||
|
{'name': 'Remote Access Mapping Error', 'type': 'str', 'value': 'remote_access_mapping_error', 'description': 'The mapping error of the Plex remote access port.'},
|
||||||
|
{'name': 'Remote Access Public IP Address', 'type': 'str', 'value': 'remote_access_public_address', 'description': 'The Plex remote access public IP address.'},
|
||||||
|
{'name': 'Remote Access Public Port', 'type': 'str', 'value': 'remote_access_public_port', 'description': 'The Plex remote access public port.'},
|
||||||
|
{'name': 'Remote Access Private IP Address', 'type': 'str', 'value': 'remote_access_private_address', 'description': 'The Plex remote access private IP address.'},
|
||||||
|
{'name': 'Remote Access Private Port', 'type': 'str', 'value': 'remote_access_private_port', 'description': 'The Plex remote access private port.'},
|
||||||
|
{'name': 'Remote Access Failure Reason', 'type': 'str', 'value': 'remote_access_reason', 'description': 'The failure reason for Plex remote access going down.'},
|
||||||
|
]
|
||||||
|
},
|
||||||
{
|
{
|
||||||
'category': 'Plex Update Available',
|
'category': 'Plex Update Available',
|
||||||
'parameters': [
|
'parameters': [
|
||||||
|
|
|
@ -182,9 +182,6 @@ _CONFIG_DEFINITIONS = {
|
||||||
'FACEBOOK_ON_NEWDEVICE': (int, 'Facebook', 0),
|
'FACEBOOK_ON_NEWDEVICE': (int, 'Facebook', 0),
|
||||||
'FIRST_RUN_COMPLETE': (int, 'General', 0),
|
'FIRST_RUN_COMPLETE': (int, 'General', 0),
|
||||||
'FREEZE_DB': (int, 'General', 0),
|
'FREEZE_DB': (int, 'General', 0),
|
||||||
'GEOIP_DB': (str, 'General', ''),
|
|
||||||
'GEOIP_DB_INSTALLED': (int, 'General', 0),
|
|
||||||
'GEOIP_DB_UPDATE_DAYS': (int, 'General', 30),
|
|
||||||
'GET_FILE_SIZES': (int, 'General', 0),
|
'GET_FILE_SIZES': (int, 'General', 0),
|
||||||
'GET_FILE_SIZES_HOLD': (dict, 'General', {'section_ids': [], 'rating_keys': []}),
|
'GET_FILE_SIZES_HOLD': (dict, 'General', {'section_ids': [], 'rating_keys': []}),
|
||||||
'GIT_BRANCH': (str, 'General', 'master'),
|
'GIT_BRANCH': (str, 'General', 'master'),
|
||||||
|
@ -299,7 +296,6 @@ _CONFIG_DEFINITIONS = {
|
||||||
'LOG_BLACKLIST': (int, 'General', 1),
|
'LOG_BLACKLIST': (int, 'General', 1),
|
||||||
'LOG_DIR': (str, 'General', ''),
|
'LOG_DIR': (str, 'General', ''),
|
||||||
'LOGGING_IGNORE_INTERVAL': (int, 'Monitoring', 120),
|
'LOGGING_IGNORE_INTERVAL': (int, 'Monitoring', 120),
|
||||||
'MAXMIND_LICENSE_KEY': (str, 'General', ''),
|
|
||||||
'METADATA_CACHE_SECONDS': (int, 'Advanced', 1800),
|
'METADATA_CACHE_SECONDS': (int, 'Advanced', 1800),
|
||||||
'MOVIE_LOGGING_ENABLE': (int, 'Monitoring', 1),
|
'MOVIE_LOGGING_ENABLE': (int, 'Monitoring', 1),
|
||||||
'MOVIE_NOTIFY_ENABLE': (int, 'Monitoring', 0),
|
'MOVIE_NOTIFY_ENABLE': (int, 'Monitoring', 0),
|
||||||
|
@ -345,6 +341,7 @@ _CONFIG_DEFINITIONS = {
|
||||||
'NMA_ON_NEWDEVICE': (int, 'NMA', 0),
|
'NMA_ON_NEWDEVICE': (int, 'NMA', 0),
|
||||||
'NOTIFICATION_THREADS': (int, 'Advanced', 2),
|
'NOTIFICATION_THREADS': (int, 'Advanced', 2),
|
||||||
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
|
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
|
||||||
|
'NOTIFY_CONTINUED_SESSION_THRESHOLD': (int, 'Monitoring', 15),
|
||||||
'NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 1),
|
'NOTIFY_GROUP_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 1),
|
||||||
'NOTIFY_GROUP_RECENTLY_ADDED_PARENT': (int, 'Monitoring', 1),
|
'NOTIFY_GROUP_RECENTLY_ADDED_PARENT': (int, 'Monitoring', 1),
|
||||||
'NOTIFY_GROUP_RECENTLY_ADDED': (int, 'Monitoring', 1),
|
'NOTIFY_GROUP_RECENTLY_ADDED': (int, 'Monitoring', 1),
|
||||||
|
@ -497,6 +494,7 @@ _CONFIG_DEFINITIONS = {
|
||||||
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
|
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
|
||||||
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
|
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
|
||||||
'REFRESH_USERS_ON_STARTUP': (int, 'Monitoring', 1),
|
'REFRESH_USERS_ON_STARTUP': (int, 'Monitoring', 1),
|
||||||
|
'REMOTE_ACCESS_PING_INTERVAL': (int, 'Advanced', 60),
|
||||||
'REMOTE_ACCESS_PING_THRESHOLD': (int, 'Advanced', 3),
|
'REMOTE_ACCESS_PING_THRESHOLD': (int, 'Advanced', 3),
|
||||||
'SESSION_DB_WRITE_ATTEMPTS': (int, 'Advanced', 5),
|
'SESSION_DB_WRITE_ATTEMPTS': (int, 'Advanced', 5),
|
||||||
'SHOW_ADVANCED_SETTINGS': (int, 'General', 0),
|
'SHOW_ADVANCED_SETTINGS': (int, 'General', 0),
|
||||||
|
@ -937,8 +935,6 @@ class Config(object):
|
||||||
self.CONFIG_VERSION = 13
|
self.CONFIG_VERSION = 13
|
||||||
|
|
||||||
if self.CONFIG_VERSION == 13:
|
if self.CONFIG_VERSION == 13:
|
||||||
if not self.GEOIP_DB:
|
|
||||||
self.GEOIP_DB = os.path.join(plexpy.DATA_DIR, 'GeoLite2-City.mmdb')
|
|
||||||
|
|
||||||
self.CONFIG_VERSION = 14
|
self.CONFIG_VERSION = 14
|
||||||
|
|
||||||
|
|
|
@ -250,7 +250,7 @@ class MonitorDatabase(object):
|
||||||
sql_results = self.action(query, args).fetchone()
|
sql_results = self.action(query, args).fetchone()
|
||||||
|
|
||||||
if sql_results is None or sql_results == "":
|
if sql_results is None or sql_results == "":
|
||||||
return ""
|
return {}
|
||||||
|
|
||||||
return sql_results
|
return sql_results
|
||||||
|
|
||||||
|
|
|
@ -246,6 +246,7 @@ class DataFactory(object):
|
||||||
|
|
||||||
row = {'reference_id': item['reference_id'],
|
row = {'reference_id': item['reference_id'],
|
||||||
'row_id': item['row_id'],
|
'row_id': item['row_id'],
|
||||||
|
'id': item['row_id'],
|
||||||
'date': item['date'],
|
'date': item['date'],
|
||||||
'started': item['started'],
|
'started': item['started'],
|
||||||
'stopped': item['stopped'],
|
'stopped': item['stopped'],
|
||||||
|
|
|
@ -23,15 +23,12 @@ from future.builtins import str
|
||||||
|
|
||||||
import arrow
|
import arrow
|
||||||
import base64
|
import base64
|
||||||
import certifi
|
|
||||||
import cloudinary
|
import cloudinary
|
||||||
from cloudinary.api import delete_resources_by_tag
|
from cloudinary.api import delete_resources_by_tag
|
||||||
from cloudinary.uploader import upload
|
from cloudinary.uploader import upload
|
||||||
from cloudinary.utils import cloudinary_url
|
from cloudinary.utils import cloudinary_url
|
||||||
import datetime
|
import datetime
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
import geoip2.database
|
|
||||||
import geoip2.errors
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import imghdr
|
import imghdr
|
||||||
from future.moves.itertools import zip_longest
|
from future.moves.itertools import zip_longest
|
||||||
|
@ -41,19 +38,14 @@ import ipwhois.utils
|
||||||
from IPy import IP
|
from IPy import IP
|
||||||
import json
|
import json
|
||||||
import math
|
import math
|
||||||
import maxminddb
|
|
||||||
from operator import itemgetter
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shlex
|
import shlex
|
||||||
import shutil
|
|
||||||
import socket
|
import socket
|
||||||
import sys
|
import sys
|
||||||
import tarfile
|
|
||||||
import time
|
import time
|
||||||
import unicodedata
|
import unicodedata
|
||||||
from future.moves.urllib.parse import urlencode
|
from future.moves.urllib.parse import urlencode
|
||||||
import urllib3
|
|
||||||
from xml.dom import minidom
|
from xml.dom import minidom
|
||||||
import xmltodict
|
import xmltodict
|
||||||
|
|
||||||
|
@ -612,164 +604,6 @@ def is_valid_ip(address):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def update_geoip_db():
|
|
||||||
if plexpy.CONFIG.GEOIP_DB_INSTALLED:
|
|
||||||
logger.info("Tautulli Helpers :: Checking for GeoLite2 database updates.")
|
|
||||||
now = timestamp()
|
|
||||||
if now - plexpy.CONFIG.GEOIP_DB_INSTALLED >= plexpy.CONFIG.GEOIP_DB_UPDATE_DAYS * 24 * 60 * 60:
|
|
||||||
return install_geoip_db(update=True)
|
|
||||||
logger.info("Tautulli Helpers :: GeoLite2 database already updated within the last %s days."
|
|
||||||
% plexpy.CONFIG.GEOIP_DB_UPDATE_DAYS)
|
|
||||||
|
|
||||||
|
|
||||||
def install_geoip_db(update=False):
|
|
||||||
if not plexpy.CONFIG.MAXMIND_LICENSE_KEY:
|
|
||||||
logger.error("Tautulli Helpers :: Failed to download GeoLite2 database file from MaxMind: Missing MaxMindLicense Key")
|
|
||||||
return False
|
|
||||||
|
|
||||||
maxmind_db = 'GeoLite2-City'
|
|
||||||
maxmind_url = 'https://download.maxmind.com/app/geoip_download?edition_id={db}&suffix={{suffix}}&license_key={key}'.format(
|
|
||||||
db=maxmind_db, key=plexpy.CONFIG.MAXMIND_LICENSE_KEY)
|
|
||||||
geolite2_db_url = maxmind_url.format(suffix='tar.gz')
|
|
||||||
geolite2_md5_url = maxmind_url.format(suffix='tar.gz.md5')
|
|
||||||
geolite2_gz = maxmind_db + '.tar.gz'
|
|
||||||
geolite2_md5 = geolite2_gz + '.md5'
|
|
||||||
geolite2_db = maxmind_db + '.mmdb'
|
|
||||||
geolite2_db_path = plexpy.CONFIG.GEOIP_DB or os.path.join(plexpy.DATA_DIR, geolite2_db)
|
|
||||||
|
|
||||||
# Check path ends with .mmdb
|
|
||||||
if os.path.splitext(geolite2_db_path)[1] != os.path.splitext(geolite2_db)[1]:
|
|
||||||
geolite2_db_path = os.path.join(geolite2_db_path, geolite2_db)
|
|
||||||
|
|
||||||
temp_gz = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_gz)
|
|
||||||
temp_md5 = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_md5)
|
|
||||||
|
|
||||||
# Retrieve the GeoLite2 gzip file
|
|
||||||
logger.debug("Tautulli Helpers :: Downloading GeoLite2 gzip file from MaxMind...")
|
|
||||||
try:
|
|
||||||
maxmind = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
|
|
||||||
with maxmind.request('GET', geolite2_db_url, preload_content=False) as r_db, open(temp_gz, 'wb') as f_db:
|
|
||||||
shutil.copyfileobj(r_db, f_db)
|
|
||||||
with maxmind.request('GET', geolite2_md5_url, preload_content=False) as r_md5, open(temp_md5, 'wb') as f_md5:
|
|
||||||
shutil.copyfileobj(r_md5, f_md5)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error("Tautulli Helpers :: Failed to download GeoLite2 gzip file from MaxMind: %s" % e)
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Check MD5 hash for GeoLite2 tar.gz file
|
|
||||||
logger.debug("Tautulli Helpers :: Checking MD5 checksum for GeoLite2 gzip file...")
|
|
||||||
try:
|
|
||||||
hash_md5 = hashlib.md5()
|
|
||||||
with open(temp_gz, 'rb') as f:
|
|
||||||
for chunk in iter(lambda: f.read(4096), b""):
|
|
||||||
hash_md5.update(chunk)
|
|
||||||
md5_hash = hash_md5.hexdigest()
|
|
||||||
|
|
||||||
with open(temp_md5, 'r') as f:
|
|
||||||
md5_checksum = f.read()
|
|
||||||
|
|
||||||
if md5_hash != md5_checksum:
|
|
||||||
logger.error("Tautulli Helpers :: MD5 checksum doesn't match for GeoLite2 database. "
|
|
||||||
"Checksum: %s, file hash: %s" % (md5_checksum, md5_hash))
|
|
||||||
return False
|
|
||||||
except Exception as e:
|
|
||||||
logger.error("Tautulli Helpers :: Failed to generate MD5 checksum for GeoLite2 gzip file: %s" % e)
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Extract the GeoLite2 database file
|
|
||||||
logger.debug("Tautulli Helpers :: Extracting GeoLite2 database...")
|
|
||||||
try:
|
|
||||||
mmdb = None
|
|
||||||
with tarfile.open(temp_gz, 'r:gz') as tar:
|
|
||||||
for member in tar.getmembers():
|
|
||||||
if geolite2_db in member.name:
|
|
||||||
member.name = os.path.basename(member.name)
|
|
||||||
tar.extractall(path=os.path.dirname(geolite2_db_path), members=[member])
|
|
||||||
mmdb = True
|
|
||||||
break
|
|
||||||
if not mmdb:
|
|
||||||
raise Exception("{} not found in gzip file.".format(geolite2_db))
|
|
||||||
except Exception as e:
|
|
||||||
logger.error("Tautulli Helpers :: Failed to extract the GeoLite2 database: %s" % e)
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Delete temportary GeoLite2 gzip file
|
|
||||||
logger.debug("Tautulli Helpers :: Deleting temporary GeoLite2 gzip file...")
|
|
||||||
try:
|
|
||||||
os.remove(temp_gz)
|
|
||||||
os.remove(temp_md5)
|
|
||||||
except Exception as e:
|
|
||||||
logger.warn("Tautulli Helpers :: Failed to remove temporary GeoLite2 gzip file: %s" % e)
|
|
||||||
|
|
||||||
plexpy.CONFIG.__setattr__('GEOIP_DB', geolite2_db_path)
|
|
||||||
plexpy.CONFIG.__setattr__('GEOIP_DB_INSTALLED', timestamp())
|
|
||||||
plexpy.CONFIG.write()
|
|
||||||
|
|
||||||
logger.debug("Tautulli Helpers :: GeoLite2 database installed successfully.")
|
|
||||||
|
|
||||||
if not update:
|
|
||||||
plexpy.schedule_job(update_geoip_db, 'Update GeoLite2 database', hours=12, minutes=0, seconds=0)
|
|
||||||
|
|
||||||
return plexpy.CONFIG.GEOIP_DB_INSTALLED
|
|
||||||
|
|
||||||
|
|
||||||
def uninstall_geoip_db():
|
|
||||||
logger.debug("Tautulli Helpers :: Uninstalling the GeoLite2 database...")
|
|
||||||
try:
|
|
||||||
os.remove(plexpy.CONFIG.GEOIP_DB)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error("Tautulli Helpers :: Failed to uninstall the GeoLite2 database: %s" % e)
|
|
||||||
return False
|
|
||||||
|
|
||||||
plexpy.CONFIG.__setattr__('GEOIP_DB_INSTALLED', 0)
|
|
||||||
plexpy.CONFIG.write()
|
|
||||||
|
|
||||||
logger.debug("Tautulli Helpers :: GeoLite2 database uninstalled successfully.")
|
|
||||||
|
|
||||||
plexpy.schedule_job(update_geoip_db, 'Update GeoLite2 database', hours=0, minutes=0, seconds=0)
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def geoip_lookup(ip_address):
|
|
||||||
if not plexpy.CONFIG.GEOIP_DB_INSTALLED:
|
|
||||||
return 'GeoLite2 database not installed. Please install from the ' \
|
|
||||||
'<a href="settings?install_geoip=true">Settings</a> page.'
|
|
||||||
|
|
||||||
if not ip_address:
|
|
||||||
return 'No IP address provided.'
|
|
||||||
|
|
||||||
try:
|
|
||||||
reader = geoip2.database.Reader(plexpy.CONFIG.GEOIP_DB)
|
|
||||||
geo = reader.city(ip_address)
|
|
||||||
reader.close()
|
|
||||||
except ValueError as e:
|
|
||||||
return 'Invalid IP address provided: %s.' % ip_address
|
|
||||||
except IOError as e:
|
|
||||||
return 'Missing GeoLite2 database. Please reinstall from the ' \
|
|
||||||
'<a href="settings?install_geoip=true">Settings</a> page.'
|
|
||||||
except maxminddb.InvalidDatabaseError as e:
|
|
||||||
return 'Invalid GeoLite2 database. Please reinstall from the ' \
|
|
||||||
'<a href="settings?install_geoip=true">Settings</a> page.'
|
|
||||||
except geoip2.errors.AddressNotFoundError as e:
|
|
||||||
return '%s' % e
|
|
||||||
except Exception as e:
|
|
||||||
return 'Error: %s' % e
|
|
||||||
|
|
||||||
geo_info = {'continent': geo.continent.name,
|
|
||||||
'country': geo.country.name,
|
|
||||||
'region': geo.subdivisions.most_specific.name,
|
|
||||||
'city': geo.city.name,
|
|
||||||
'postal_code': geo.postal.code,
|
|
||||||
'timezone': geo.location.time_zone,
|
|
||||||
'latitude': geo.location.latitude,
|
|
||||||
'longitude': geo.location.longitude,
|
|
||||||
'accuracy': geo.location.accuracy_radius
|
|
||||||
}
|
|
||||||
|
|
||||||
return geo_info
|
|
||||||
|
|
||||||
|
|
||||||
def whois_lookup(ip_address):
|
def whois_lookup(ip_address):
|
||||||
|
|
||||||
nets = []
|
nets = []
|
||||||
|
|
|
@ -755,7 +755,7 @@ class Libraries(object):
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warn("Tautulli Libraries :: Unable to execute database query for set_config: %s." % e)
|
logger.warn("Tautulli Libraries :: Unable to execute database query for set_config: %s." % e)
|
||||||
|
|
||||||
def get_details(self, section_id=None):
|
def get_details(self, section_id=None, server_id=None):
|
||||||
default_return = {'row_id': 0,
|
default_return = {'row_id': 0,
|
||||||
'server_id': '',
|
'server_id': '',
|
||||||
'section_id': 0,
|
'section_id': 0,
|
||||||
|
@ -776,7 +776,10 @@ class Libraries(object):
|
||||||
if not section_id:
|
if not section_id:
|
||||||
return default_return
|
return default_return
|
||||||
|
|
||||||
def get_library_details(section_id=section_id):
|
if server_id is None:
|
||||||
|
server_id = plexpy.CONFIG.PMS_IDENTIFIER
|
||||||
|
|
||||||
|
def get_library_details(section_id=section_id, server_id=server_id):
|
||||||
monitor_db = database.MonitorDatabase()
|
monitor_db = database.MonitorDatabase()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -787,8 +790,8 @@ class Libraries(object):
|
||||||
'custom_art_url AS custom_art, is_active, ' \
|
'custom_art_url AS custom_art, is_active, ' \
|
||||||
'do_notify, do_notify_created, keep_history, deleted_section ' \
|
'do_notify, do_notify_created, keep_history, deleted_section ' \
|
||||||
'FROM library_sections ' \
|
'FROM library_sections ' \
|
||||||
'WHERE section_id = ? '
|
'WHERE section_id = ? AND server_id = ? '
|
||||||
result = monitor_db.select(query, args=[section_id])
|
result = monitor_db.select(query, args=[section_id, server_id])
|
||||||
else:
|
else:
|
||||||
result = []
|
result = []
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
@ -828,7 +831,7 @@ class Libraries(object):
|
||||||
}
|
}
|
||||||
return library_details
|
return library_details
|
||||||
|
|
||||||
library_details = get_library_details(section_id=section_id)
|
library_details = get_library_details(section_id=section_id, server_id=server_id)
|
||||||
|
|
||||||
if library_details:
|
if library_details:
|
||||||
return library_details
|
return library_details
|
||||||
|
@ -839,7 +842,7 @@ class Libraries(object):
|
||||||
# Let's first refresh the libraries list to make sure the library isn't newly added and not in the db yet
|
# Let's first refresh the libraries list to make sure the library isn't newly added and not in the db yet
|
||||||
refresh_libraries()
|
refresh_libraries()
|
||||||
|
|
||||||
library_details = get_library_details(section_id=section_id)
|
library_details = get_library_details(section_id=section_id, server_id=server_id)
|
||||||
|
|
||||||
if library_details:
|
if library_details:
|
||||||
return library_details
|
return library_details
|
||||||
|
|
|
@ -18,7 +18,6 @@
|
||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import time
|
|
||||||
|
|
||||||
from apscheduler.triggers.cron import CronTrigger
|
from apscheduler.triggers.cron import CronTrigger
|
||||||
import email.utils
|
import email.utils
|
||||||
|
|
|
@ -565,6 +565,10 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||||
stream_count = len(sessions)
|
stream_count = len(sessions)
|
||||||
user_stream_count = len(user_sessions)
|
user_stream_count = len(user_sessions)
|
||||||
|
|
||||||
|
lan_bandwidth = sum(helpers.cast_to_int(s['bandwidth']) for s in sessions if s['location'] == 'lan')
|
||||||
|
wan_bandwidth = sum(helpers.cast_to_int(s['bandwidth']) for s in sessions if s['location'] != 'lan')
|
||||||
|
total_bandwidth = lan_bandwidth + wan_bandwidth
|
||||||
|
|
||||||
# Generate a combined transcode decision value
|
# Generate a combined transcode decision value
|
||||||
if session.get('stream_video_decision', '') == 'transcode' or session.get('stream_audio_decision', '') == 'transcode':
|
if session.get('stream_video_decision', '') == 'transcode' or session.get('stream_audio_decision', '') == 'transcode':
|
||||||
transcode_decision = 'Transcode'
|
transcode_decision = 'Transcode'
|
||||||
|
@ -650,6 +654,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||||
themoviedb_info = lookup_themoviedb_by_id(rating_key=lookup_key,
|
themoviedb_info = lookup_themoviedb_by_id(rating_key=lookup_key,
|
||||||
thetvdb_id=notify_params.get('thetvdb_id'),
|
thetvdb_id=notify_params.get('thetvdb_id'),
|
||||||
imdb_id=notify_params.get('imdb_id'))
|
imdb_id=notify_params.get('imdb_id'))
|
||||||
|
themoviedb_info.pop('rating_key', None)
|
||||||
notify_params.update(themoviedb_info)
|
notify_params.update(themoviedb_info)
|
||||||
|
|
||||||
# Get TVmaze info (for tv shows only)
|
# Get TVmaze info (for tv shows only)
|
||||||
|
@ -665,6 +670,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||||
tvmaze_info = lookup_tvmaze_by_id(rating_key=lookup_key,
|
tvmaze_info = lookup_tvmaze_by_id(rating_key=lookup_key,
|
||||||
thetvdb_id=notify_params.get('thetvdb_id'),
|
thetvdb_id=notify_params.get('thetvdb_id'),
|
||||||
imdb_id=notify_params.get('imdb_id'))
|
imdb_id=notify_params.get('imdb_id'))
|
||||||
|
tvmaze_info.pop('rating_key', None)
|
||||||
notify_params.update(tvmaze_info)
|
notify_params.update(tvmaze_info)
|
||||||
|
|
||||||
if tvmaze_info.get('thetvdb_id'):
|
if tvmaze_info.get('thetvdb_id'):
|
||||||
|
@ -685,7 +691,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||||
tracks = notify_params['children_count']
|
tracks = notify_params['children_count']
|
||||||
else:
|
else:
|
||||||
musicbrainz_type = 'recording'
|
musicbrainz_type = 'recording'
|
||||||
artist = notify_params['original_title']
|
artist = notify_params['original_title'] or notify_params['grandparent_title']
|
||||||
release = notify_params['parent_title']
|
release = notify_params['parent_title']
|
||||||
recording = notify_params['title']
|
recording = notify_params['title']
|
||||||
tracks = notify_params['children_count']
|
tracks = notify_params['children_count']
|
||||||
|
@ -694,6 +700,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||||
musicbrainz_info = lookup_musicbrainz_info(musicbrainz_type=musicbrainz_type, rating_key=rating_key,
|
musicbrainz_info = lookup_musicbrainz_info(musicbrainz_type=musicbrainz_type, rating_key=rating_key,
|
||||||
artist=artist, release=release, recording=recording, tracks=tracks,
|
artist=artist, release=release, recording=recording, tracks=tracks,
|
||||||
tnum=tnum)
|
tnum=tnum)
|
||||||
|
musicbrainz_info.pop('rating_key', None)
|
||||||
notify_params.update(musicbrainz_info)
|
notify_params.update(musicbrainz_info)
|
||||||
|
|
||||||
if notify_params['media_type'] in ('movie', 'show', 'artist'):
|
if notify_params['media_type'] in ('movie', 'show', 'artist'):
|
||||||
|
@ -831,6 +838,9 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||||
'direct_plays': transcode_decision_count['direct play'],
|
'direct_plays': transcode_decision_count['direct play'],
|
||||||
'direct_streams': transcode_decision_count['copy'],
|
'direct_streams': transcode_decision_count['copy'],
|
||||||
'transcodes': transcode_decision_count['transcode'],
|
'transcodes': transcode_decision_count['transcode'],
|
||||||
|
'total_bandwidth': total_bandwidth,
|
||||||
|
'lan_bandwidth': lan_bandwidth,
|
||||||
|
'wan_bandwidth': wan_bandwidth,
|
||||||
'user_streams': user_stream_count,
|
'user_streams': user_stream_count,
|
||||||
'user_direct_plays': user_transcode_decision_count['direct play'],
|
'user_direct_plays': user_transcode_decision_count['direct play'],
|
||||||
'user_direct_streams': user_transcode_decision_count['copy'],
|
'user_direct_streams': user_transcode_decision_count['copy'],
|
||||||
|
@ -838,6 +848,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||||
'user': notify_params['friendly_name'],
|
'user': notify_params['friendly_name'],
|
||||||
'username': notify_params['user'],
|
'username': notify_params['user'],
|
||||||
'user_email': notify_params['email'],
|
'user_email': notify_params['email'],
|
||||||
|
'user_thumb': notify_params['user_thumb'],
|
||||||
'device': notify_params['device'],
|
'device': notify_params['device'],
|
||||||
'platform': notify_params['platform'],
|
'platform': notify_params['platform'],
|
||||||
'product': notify_params['product'],
|
'product': notify_params['product'],
|
||||||
|
@ -850,6 +861,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||||
'progress_duration': view_offset,
|
'progress_duration': view_offset,
|
||||||
'progress_time': arrow.get(view_offset * 60).format(duration_format),
|
'progress_time': arrow.get(view_offset * 60).format(duration_format),
|
||||||
'progress_percent': helpers.get_percent(view_offset, duration),
|
'progress_percent': helpers.get_percent(view_offset, duration),
|
||||||
|
'initial_stream': notify_params['initial_stream'],
|
||||||
'transcode_decision': transcode_decision,
|
'transcode_decision': transcode_decision,
|
||||||
'video_decision': notify_params['video_decision'],
|
'video_decision': notify_params['video_decision'],
|
||||||
'audio_decision': notify_params['audio_decision'],
|
'audio_decision': notify_params['audio_decision'],
|
||||||
|
@ -1047,6 +1059,7 @@ def build_server_notify_params(notify_action=None, **kwargs):
|
||||||
|
|
||||||
pms_download_info = defaultdict(str, kwargs.pop('pms_download_info', {}))
|
pms_download_info = defaultdict(str, kwargs.pop('pms_download_info', {}))
|
||||||
plexpy_download_info = defaultdict(str, kwargs.pop('plexpy_download_info', {}))
|
plexpy_download_info = defaultdict(str, kwargs.pop('plexpy_download_info', {}))
|
||||||
|
remote_access_info = defaultdict(str, kwargs.pop('remote_access_info', {}))
|
||||||
|
|
||||||
now = arrow.now()
|
now = arrow.now()
|
||||||
now_iso = now.isocalendar()
|
now_iso = now.isocalendar()
|
||||||
|
@ -1078,6 +1091,14 @@ def build_server_notify_params(notify_action=None, **kwargs):
|
||||||
'timestamp': now.format(time_format),
|
'timestamp': now.format(time_format),
|
||||||
'unixtime': helpers.timestamp(),
|
'unixtime': helpers.timestamp(),
|
||||||
'utctime': helpers.utc_now_iso(),
|
'utctime': helpers.utc_now_iso(),
|
||||||
|
# Plex remote access parameters
|
||||||
|
'remote_access_mapping_state': remote_access_info['mapping_state'],
|
||||||
|
'remote_access_mapping_error': remote_access_info['mapping_error'],
|
||||||
|
'remote_access_public_address': remote_access_info['public_address'],
|
||||||
|
'remote_access_public_port': remote_access_info['public_port'],
|
||||||
|
'remote_access_private_address': remote_access_info['private_address'],
|
||||||
|
'remote_access_private_port': remote_access_info['private_port'],
|
||||||
|
'remote_access_reason': remote_access_info['reason'],
|
||||||
# Plex Media Server update parameters
|
# Plex Media Server update parameters
|
||||||
'update_version': pms_download_info['version'],
|
'update_version': pms_download_info['version'],
|
||||||
'update_url': pms_download_info['download_url'],
|
'update_url': pms_download_info['download_url'],
|
||||||
|
|
|
@ -16,7 +16,6 @@
|
||||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
from future.builtins import next
|
|
||||||
from future.builtins import str
|
from future.builtins import str
|
||||||
from future.builtins import object
|
from future.builtins import object
|
||||||
|
|
||||||
|
@ -81,7 +80,6 @@ else:
|
||||||
|
|
||||||
BROWSER_NOTIFIERS = {}
|
BROWSER_NOTIFIERS = {}
|
||||||
|
|
||||||
|
|
||||||
AGENT_IDS = {'growl': 0,
|
AGENT_IDS = {'growl': 0,
|
||||||
'prowl': 1,
|
'prowl': 1,
|
||||||
'xbmc': 2,
|
'xbmc': 2,
|
||||||
|
@ -104,7 +102,8 @@ AGENT_IDS = {'growl': 0,
|
||||||
'groupme': 22,
|
'groupme': 22,
|
||||||
'mqtt': 23,
|
'mqtt': 23,
|
||||||
'zapier': 24,
|
'zapier': 24,
|
||||||
'webhook': 25
|
'webhook': 25,
|
||||||
|
'plexmobileapp': 26
|
||||||
}
|
}
|
||||||
|
|
||||||
DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': ''}]
|
DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': ''}]
|
||||||
|
@ -113,91 +112,141 @@ DEFAULT_CUSTOM_CONDITIONS = [{'parameter': '', 'operator': '', 'value': ''}]
|
||||||
def available_notification_agents():
|
def available_notification_agents():
|
||||||
agents = [{'label': 'Tautulli Remote Android App',
|
agents = [{'label': 'Tautulli Remote Android App',
|
||||||
'name': 'androidapp',
|
'name': 'androidapp',
|
||||||
'id': AGENT_IDS['androidapp']
|
'id': AGENT_IDS['androidapp'],
|
||||||
|
'class': ANDROIDAPP,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Boxcar',
|
{'label': 'Boxcar',
|
||||||
'name': 'boxcar',
|
'name': 'boxcar',
|
||||||
'id': AGENT_IDS['boxcar']
|
'id': AGENT_IDS['boxcar'],
|
||||||
|
'class': BOXCAR,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Browser',
|
{'label': 'Browser',
|
||||||
'name': 'browser',
|
'name': 'browser',
|
||||||
'id': AGENT_IDS['browser']
|
'id': AGENT_IDS['browser'],
|
||||||
|
'class': BROWSER,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Discord',
|
{'label': 'Discord',
|
||||||
'name': 'discord',
|
'name': 'discord',
|
||||||
'id': AGENT_IDS['discord'],
|
'id': AGENT_IDS['discord'],
|
||||||
|
'class': DISCORD,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Email',
|
{'label': 'Email',
|
||||||
'name': 'email',
|
'name': 'email',
|
||||||
'id': AGENT_IDS['email']
|
'id': AGENT_IDS['email'],
|
||||||
|
'class': EMAIL,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Facebook',
|
{'label': 'Facebook',
|
||||||
'name': 'facebook',
|
'name': 'facebook',
|
||||||
'id': AGENT_IDS['facebook']
|
'id': AGENT_IDS['facebook'],
|
||||||
|
'class': FACEBOOK,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'GroupMe',
|
{'label': 'GroupMe',
|
||||||
'name': 'groupme',
|
'name': 'groupme',
|
||||||
'id': AGENT_IDS['groupme']
|
'id': AGENT_IDS['groupme'],
|
||||||
|
'class': GROUPME,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Growl',
|
{'label': 'Growl',
|
||||||
'name': 'growl',
|
'name': 'growl',
|
||||||
'id': AGENT_IDS['growl']
|
'id': AGENT_IDS['growl'],
|
||||||
|
'class': GROWL,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'IFTTT',
|
{'label': 'IFTTT',
|
||||||
'name': 'ifttt',
|
'name': 'ifttt',
|
||||||
'id': AGENT_IDS['ifttt']
|
'id': AGENT_IDS['ifttt'],
|
||||||
|
'class': IFTTT,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Join',
|
{'label': 'Join',
|
||||||
'name': 'join',
|
'name': 'join',
|
||||||
'id': AGENT_IDS['join']
|
'id': AGENT_IDS['join'],
|
||||||
|
'class': JOIN,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Kodi',
|
{'label': 'Kodi',
|
||||||
'name': 'xbmc',
|
'name': 'xbmc',
|
||||||
'id': AGENT_IDS['xbmc']
|
'id': AGENT_IDS['xbmc'],
|
||||||
|
'class': XBMC,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'MQTT',
|
{'label': 'MQTT',
|
||||||
'name': 'mqtt',
|
'name': 'mqtt',
|
||||||
'id': AGENT_IDS['mqtt']
|
'id': AGENT_IDS['mqtt'],
|
||||||
|
'class': MQTT,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Plex Home Theater',
|
{'label': 'Plex Home Theater',
|
||||||
'name': 'plex',
|
'name': 'plex',
|
||||||
'id': AGENT_IDS['plex']
|
'id': AGENT_IDS['plex'],
|
||||||
|
'class': PLEX,
|
||||||
|
'action_types': ('all',)
|
||||||
|
},
|
||||||
|
{'label': 'Plex Android / iOS App',
|
||||||
|
'name': 'plexmobileapp',
|
||||||
|
'id': AGENT_IDS['plexmobileapp'],
|
||||||
|
'class': PLEXMOBILEAPP,
|
||||||
|
'action_types': ('on_play', 'on_created', 'on_newdevice')
|
||||||
},
|
},
|
||||||
{'label': 'Prowl',
|
{'label': 'Prowl',
|
||||||
'name': 'prowl',
|
'name': 'prowl',
|
||||||
'id': AGENT_IDS['prowl']
|
'id': AGENT_IDS['prowl'],
|
||||||
|
'class': PROWL,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Pushbullet',
|
{'label': 'Pushbullet',
|
||||||
'name': 'pushbullet',
|
'name': 'pushbullet',
|
||||||
'id': AGENT_IDS['pushbullet']
|
'id': AGENT_IDS['pushbullet'],
|
||||||
|
'class': PUSHBULLET,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Pushover',
|
{'label': 'Pushover',
|
||||||
'name': 'pushover',
|
'name': 'pushover',
|
||||||
'id': AGENT_IDS['pushover']
|
'id': AGENT_IDS['pushover'],
|
||||||
|
'class': PUSHOVER,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Script',
|
{'label': 'Script',
|
||||||
'name': 'scripts',
|
'name': 'scripts',
|
||||||
'id': AGENT_IDS['scripts']
|
'id': AGENT_IDS['scripts'],
|
||||||
|
'class': SCRIPTS,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Slack',
|
{'label': 'Slack',
|
||||||
'name': 'slack',
|
'name': 'slack',
|
||||||
'id': AGENT_IDS['slack']
|
'id': AGENT_IDS['slack'],
|
||||||
|
'class': SLACK,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Telegram',
|
{'label': 'Telegram',
|
||||||
'name': 'telegram',
|
'name': 'telegram',
|
||||||
'id': AGENT_IDS['telegram']
|
'id': AGENT_IDS['telegram'],
|
||||||
|
'class': TELEGRAM,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Twitter',
|
{'label': 'Twitter',
|
||||||
'name': 'twitter',
|
'name': 'twitter',
|
||||||
'id': AGENT_IDS['twitter']
|
'id': AGENT_IDS['twitter'],
|
||||||
|
'class': TWITTER,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Webhook',
|
{'label': 'Webhook',
|
||||||
'name': 'webhook',
|
'name': 'webhook',
|
||||||
'id': AGENT_IDS['webhook']
|
'id': AGENT_IDS['webhook'],
|
||||||
|
'class': WEBHOOK,
|
||||||
|
'action_types': ('all',)
|
||||||
},
|
},
|
||||||
{'label': 'Zapier',
|
{'label': 'Zapier',
|
||||||
'name': 'zapier',
|
'name': 'zapier',
|
||||||
'id': AGENT_IDS['zapier']
|
'id': AGENT_IDS['zapier'],
|
||||||
|
'class': ZAPIER,
|
||||||
|
'action_types': ('all',)
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -205,13 +254,15 @@ def available_notification_agents():
|
||||||
if OSX().validate():
|
if OSX().validate():
|
||||||
agents.append({'label': 'macOS Notification Center',
|
agents.append({'label': 'macOS Notification Center',
|
||||||
'name': 'osx',
|
'name': 'osx',
|
||||||
'id': AGENT_IDS['osx']
|
'id': AGENT_IDS['osx'],
|
||||||
|
'class': OSX,
|
||||||
|
'action_types': ('all',)
|
||||||
})
|
})
|
||||||
|
|
||||||
return agents
|
return agents
|
||||||
|
|
||||||
|
|
||||||
def available_notification_actions():
|
def available_notification_actions(agent_id=None):
|
||||||
actions = [{'label': 'Playback Start',
|
actions = [{'label': 'Playback Start',
|
||||||
'name': 'on_play',
|
'name': 'on_play',
|
||||||
'description': 'Trigger a notification when a stream is started.',
|
'description': 'Trigger a notification when a stream is started.',
|
||||||
|
@ -312,7 +363,7 @@ def available_notification_actions():
|
||||||
'name': 'on_extdown',
|
'name': 'on_extdown',
|
||||||
'description': 'Trigger a notification when the Plex Media Server cannot be reached externally.',
|
'description': 'Trigger a notification when the Plex Media Server cannot be reached externally.',
|
||||||
'subject': 'Tautulli ({server_name})',
|
'subject': 'Tautulli ({server_name})',
|
||||||
'body': 'The Plex Media Server remote access is down.',
|
'body': 'The Plex Media Server remote access is down. ({remote_access_reason})',
|
||||||
'icon': 'fa-server',
|
'icon': 'fa-server',
|
||||||
'media_types': ('server',)
|
'media_types': ('server',)
|
||||||
},
|
},
|
||||||
|
@ -350,72 +401,31 @@ def available_notification_actions():
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
||||||
|
if str(agent_id).isdigit():
|
||||||
|
action_types = get_notify_agents(return_dict=True).get(int(agent_id), {}).get('action_types', [])
|
||||||
|
if 'all' not in action_types:
|
||||||
|
actions = [a for a in actions if a['name'] in action_types]
|
||||||
|
|
||||||
return actions
|
return actions
|
||||||
|
|
||||||
|
|
||||||
def get_agent_class(agent_id=None, config=None):
|
def get_agent_class(agent_id=None, config=None):
|
||||||
if str(agent_id).isdigit():
|
if str(agent_id).isdigit():
|
||||||
agent_id = int(agent_id)
|
agent = get_notify_agents(return_dict=True).get(int(agent_id), {}).get('class', Notifier)
|
||||||
|
return agent(config=config)
|
||||||
if agent_id == 0:
|
|
||||||
return GROWL(config=config)
|
|
||||||
elif agent_id == 1:
|
|
||||||
return PROWL(config=config)
|
|
||||||
elif agent_id == 2:
|
|
||||||
return XBMC(config=config)
|
|
||||||
elif agent_id == 3:
|
|
||||||
return PLEX(config=config)
|
|
||||||
elif agent_id == 6:
|
|
||||||
return PUSHBULLET(config=config)
|
|
||||||
elif agent_id == 7:
|
|
||||||
return PUSHOVER(config=config)
|
|
||||||
elif agent_id == 8:
|
|
||||||
return OSX(config=config)
|
|
||||||
elif agent_id == 9:
|
|
||||||
return BOXCAR(config=config)
|
|
||||||
elif agent_id == 10:
|
|
||||||
return EMAIL(config=config)
|
|
||||||
elif agent_id == 11:
|
|
||||||
return TWITTER(config=config)
|
|
||||||
elif agent_id == 12:
|
|
||||||
return IFTTT(config=config)
|
|
||||||
elif agent_id == 13:
|
|
||||||
return TELEGRAM(config=config)
|
|
||||||
elif agent_id == 14:
|
|
||||||
return SLACK(config=config)
|
|
||||||
elif agent_id == 15:
|
|
||||||
return SCRIPTS(config=config)
|
|
||||||
elif agent_id == 16:
|
|
||||||
return FACEBOOK(config=config)
|
|
||||||
elif agent_id == 17:
|
|
||||||
return BROWSER(config=config)
|
|
||||||
elif agent_id == 18:
|
|
||||||
return JOIN(config=config)
|
|
||||||
elif agent_id == 20:
|
|
||||||
return DISCORD(config=config)
|
|
||||||
elif agent_id == 21:
|
|
||||||
return ANDROIDAPP(config=config)
|
|
||||||
elif agent_id == 22:
|
|
||||||
return GROUPME(config=config)
|
|
||||||
elif agent_id == 23:
|
|
||||||
return MQTT(config=config)
|
|
||||||
elif agent_id == 24:
|
|
||||||
return ZAPIER(config=config)
|
|
||||||
elif agent_id == 25:
|
|
||||||
return WEBHOOK(config=config)
|
|
||||||
else:
|
|
||||||
return Notifier(config=config)
|
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def get_notify_agents():
|
def get_notify_agents(return_dict=False):
|
||||||
|
if return_dict:
|
||||||
|
return {a['id']: a for a in available_notification_agents()}
|
||||||
return tuple(a['name'] for a in sorted(available_notification_agents(), key=lambda k: k['label']))
|
return tuple(a['name'] for a in sorted(available_notification_agents(), key=lambda k: k['label']))
|
||||||
|
|
||||||
|
|
||||||
def get_notify_actions(return_dict=False):
|
def get_notify_actions(return_dict=False):
|
||||||
if return_dict:
|
if return_dict:
|
||||||
return {a.pop('name'): a for a in available_notification_actions()}
|
return {a['name']: a for a in available_notification_actions()}
|
||||||
return tuple(a['name'] for a in available_notification_actions())
|
return tuple(a['name'] for a in available_notification_actions())
|
||||||
|
|
||||||
|
|
||||||
|
@ -523,7 +533,7 @@ def add_notifier_config(agent_id=None, **kwargs):
|
||||||
% agent_id)
|
% agent_id)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
agent = next((a for a in available_notification_agents() if a['id'] == agent_id), None)
|
agent = get_notify_agents(return_dict=True).get(agent_id, None)
|
||||||
|
|
||||||
if not agent:
|
if not agent:
|
||||||
logger.error("Tautulli Notifiers :: Unable to retrieve new notification agent: invalid agent_id %s."
|
logger.error("Tautulli Notifiers :: Unable to retrieve new notification agent: invalid agent_id %s."
|
||||||
|
@ -572,7 +582,7 @@ def set_notifier_config(notifier_id=None, agent_id=None, **kwargs):
|
||||||
% agent_id)
|
% agent_id)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
agent = next((a for a in available_notification_agents() if a['id'] == agent_id), None)
|
agent = get_notify_agents(return_dict=True).get(agent_id, None)
|
||||||
|
|
||||||
if not agent:
|
if not agent:
|
||||||
logger.error("Tautulli Notifiers :: Unable to retrieve existing notification agent: invalid agent_id %s."
|
logger.error("Tautulli Notifiers :: Unable to retrieve existing notification agent: invalid agent_id %s."
|
||||||
|
@ -2368,6 +2378,190 @@ class PLEX(Notifier):
|
||||||
return config_option
|
return config_option
|
||||||
|
|
||||||
|
|
||||||
|
class PLEXMOBILEAPP(Notifier):
|
||||||
|
"""
|
||||||
|
Plex Mobile App Notifications
|
||||||
|
"""
|
||||||
|
NAME = 'Plex Android / iOS App'
|
||||||
|
NOTIFICATION_URL = 'https://notifications.plex.tv/api/v1/notifications'
|
||||||
|
_DEFAULT_CONFIG = {'user_ids': [],
|
||||||
|
'tap_action': 'preplay',
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, config=None):
|
||||||
|
super(PLEXMOBILEAPP, self).__init__(config=config)
|
||||||
|
|
||||||
|
self.configurations = {
|
||||||
|
'created': {'group': 'media', 'identifier': 'tv.plex.notification.library.new'},
|
||||||
|
'play': {'group': 'media', 'identifier': 'tv.plex.notification.playback.started'},
|
||||||
|
'newdevice': {'group': 'admin', 'identifier': 'tv.plex.notification.device.new'}
|
||||||
|
}
|
||||||
|
|
||||||
|
def agent_notify(self, subject='', body='', action='', **kwargs):
|
||||||
|
if action not in self.configurations and not action.startswith('test'):
|
||||||
|
logger.error(u"Tautulli Notifiers :: Notification action %s not allowed for %s." % (action, self.NAME))
|
||||||
|
return
|
||||||
|
|
||||||
|
if action == 'test':
|
||||||
|
tests = []
|
||||||
|
for configuration in self.configurations:
|
||||||
|
tests.append(self.agent_notify(subject=subject, body=body, action='test_'+configuration))
|
||||||
|
return all(tests)
|
||||||
|
|
||||||
|
configuration_action = action.split('test_')[-1]
|
||||||
|
|
||||||
|
# No subject to always show up regardless of client selected filters
|
||||||
|
# icon can be info, warning, or error
|
||||||
|
# play = true to start playing when tapping the notification
|
||||||
|
# Send the minimal amount of data necessary through Plex servers
|
||||||
|
data = {
|
||||||
|
'group': self.configurations[configuration_action]['group'],
|
||||||
|
'identifier': self.configurations[configuration_action]['identifier'],
|
||||||
|
'to': self.config['user_ids'],
|
||||||
|
'data': {
|
||||||
|
'provider': {
|
||||||
|
'identifier': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||||
|
'title': plexpy.CONFIG.PMS_NAME
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pretty_metadata = PrettyMetadata(kwargs.get('parameters'))
|
||||||
|
|
||||||
|
if action.startswith('test'):
|
||||||
|
data['data']['player'] = {
|
||||||
|
'title': 'Device',
|
||||||
|
'platform': 'Platform',
|
||||||
|
'machineIdentifier': 'Tautulli'
|
||||||
|
}
|
||||||
|
data['data']['user'] = {
|
||||||
|
'title': 'User',
|
||||||
|
'id': 0
|
||||||
|
}
|
||||||
|
data['metadata'] = {
|
||||||
|
'type': 'movie',
|
||||||
|
'title': subject,
|
||||||
|
'year': body
|
||||||
|
}
|
||||||
|
|
||||||
|
elif action in ('play', 'newdevice'):
|
||||||
|
data['data']['player'] = {
|
||||||
|
'title': pretty_metadata.parameters['player'],
|
||||||
|
'platform': pretty_metadata.parameters['platform'],
|
||||||
|
'machineIdentifier': pretty_metadata.parameters['machine_id']
|
||||||
|
}
|
||||||
|
data['data']['user'] = {
|
||||||
|
'title': pretty_metadata.parameters['user'],
|
||||||
|
'id': pretty_metadata.parameters['user_id'],
|
||||||
|
'thumb': pretty_metadata.parameters['user_thumb'],
|
||||||
|
}
|
||||||
|
|
||||||
|
elif action == 'created':
|
||||||
|
# No addition data required for recently added
|
||||||
|
pass
|
||||||
|
|
||||||
|
else:
|
||||||
|
logger.error(u"Tautulli Notifiers :: Notification action %s not supported for %s." % (action, self.NAME))
|
||||||
|
return
|
||||||
|
|
||||||
|
if data['group'] == 'media' and not action.startswith('test'):
|
||||||
|
media_type = pretty_metadata.media_type
|
||||||
|
uri_rating_key = None
|
||||||
|
|
||||||
|
if media_type == 'movie':
|
||||||
|
metadata = {
|
||||||
|
'type': media_type,
|
||||||
|
'title': pretty_metadata.parameters['title'],
|
||||||
|
'year': pretty_metadata.parameters['year'],
|
||||||
|
'thumb': pretty_metadata.parameters['thumb']
|
||||||
|
}
|
||||||
|
elif media_type == 'show':
|
||||||
|
metadata = {
|
||||||
|
'type': media_type,
|
||||||
|
'title': pretty_metadata.parameters['show_name'],
|
||||||
|
'thumb': pretty_metadata.parameters['thumb']
|
||||||
|
}
|
||||||
|
elif media_type == 'season':
|
||||||
|
metadata = {
|
||||||
|
'type': 'show',
|
||||||
|
'title': pretty_metadata.parameters['show_name'],
|
||||||
|
'thumb': pretty_metadata.parameters['thumb'],
|
||||||
|
}
|
||||||
|
data['data']['count'] = pretty_metadata.parameters['episode_count']
|
||||||
|
elif media_type == 'episode':
|
||||||
|
metadata = {
|
||||||
|
'type': media_type,
|
||||||
|
'title': pretty_metadata.parameters['episode_name'],
|
||||||
|
'grandparentTitle': pretty_metadata.parameters['show_name'],
|
||||||
|
'index': pretty_metadata.parameters['episode_num'],
|
||||||
|
'parentIndex': pretty_metadata.parameters['season_num'],
|
||||||
|
'grandparentThumb': pretty_metadata.parameters['grandparent_thumb']
|
||||||
|
}
|
||||||
|
elif media_type == 'artist':
|
||||||
|
metadata = {
|
||||||
|
'type': media_type,
|
||||||
|
'title': pretty_metadata.parameters['artist_name'],
|
||||||
|
'thumb': pretty_metadata.parameters['thumb']
|
||||||
|
}
|
||||||
|
elif media_type == 'album':
|
||||||
|
metadata = {
|
||||||
|
'type': media_type,
|
||||||
|
'title': pretty_metadata.parameters['album_name'],
|
||||||
|
'year': pretty_metadata.parameters['year'],
|
||||||
|
'parentTitle': pretty_metadata.parameters['artist_name'],
|
||||||
|
'thumb': pretty_metadata.parameters['thumb'],
|
||||||
|
}
|
||||||
|
elif media_type == 'track':
|
||||||
|
metadata = {
|
||||||
|
'type': 'album',
|
||||||
|
'title': pretty_metadata.parameters['album_name'],
|
||||||
|
'year': pretty_metadata.parameters['year'],
|
||||||
|
'parentTitle': pretty_metadata.parameters['artist_name'],
|
||||||
|
'thumb': pretty_metadata.parameters['parent_thumb']
|
||||||
|
}
|
||||||
|
uri_rating_key = pretty_metadata.parameters['parent_rating_key']
|
||||||
|
else:
|
||||||
|
logger.error(u"Tautulli Notifiers :: Media type %s not supported for %s." % (media_type, self.NAME))
|
||||||
|
return
|
||||||
|
|
||||||
|
data['metadata'] = metadata
|
||||||
|
data['uri'] = 'server://{}/com.plexapp.plugins.library/library/metadata/{}'.format(
|
||||||
|
plexpy.CONFIG.PMS_IDENTIFIER, uri_rating_key or pretty_metadata.parameters['rating_key']
|
||||||
|
)
|
||||||
|
data['play'] = self.config['tap_action'] == 'play'
|
||||||
|
|
||||||
|
headers = {'X-Plex-Token': plexpy.CONFIG.PMS_TOKEN}
|
||||||
|
|
||||||
|
return self.make_request(self.NOTIFICATION_URL, headers=headers, json=data)
|
||||||
|
|
||||||
|
def get_users(self):
|
||||||
|
user_ids = {u['user_id']: u['friendly_name'] for u in users.Users().get_users() if u['user_id']}
|
||||||
|
user_ids[''] = ''
|
||||||
|
return user_ids
|
||||||
|
|
||||||
|
def _return_config_options(self):
|
||||||
|
config_option = [{'label': 'Plex User(s)',
|
||||||
|
'value': self.config['user_ids'],
|
||||||
|
'name': 'plexmobileapp_user_ids',
|
||||||
|
'description': 'Select which Plex User(s) to receive notifications.<br>'
|
||||||
|
'Note: The user(s) must have notifications enabled '
|
||||||
|
'for the matching Tautulli triggers in their Plex mobile app.',
|
||||||
|
'input_type': 'select',
|
||||||
|
'select_options': self.get_users()
|
||||||
|
},
|
||||||
|
{'label': 'Notification Tap Action',
|
||||||
|
'value': self.config['tap_action'],
|
||||||
|
'name': 'plexmobileapp_tap_action',
|
||||||
|
'description': 'Set the action when tapping on the notification.',
|
||||||
|
'input_type': 'select',
|
||||||
|
'select_options': {'preplay': 'Go to media pre-play screen',
|
||||||
|
'play': 'Start playing the media'}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
return config_option
|
||||||
|
|
||||||
|
|
||||||
class PROWL(Notifier):
|
class PROWL(Notifier):
|
||||||
"""
|
"""
|
||||||
Prowl notifications.
|
Prowl notifications.
|
||||||
|
|
|
@ -390,6 +390,14 @@ class PlexTV(object):
|
||||||
|
|
||||||
return request
|
return request
|
||||||
|
|
||||||
|
def get_plextv_geoip(self, ip_address='', output_format=''):
|
||||||
|
uri = '/api/v2/geoip?ip_address=%s' % ip_address
|
||||||
|
request = self.request_handler.make_request(uri=uri,
|
||||||
|
request_type='GET',
|
||||||
|
output_format=output_format)
|
||||||
|
|
||||||
|
return request
|
||||||
|
|
||||||
def get_full_users_list(self):
|
def get_full_users_list(self):
|
||||||
own_account = self.get_plextv_user_details(output_format='xml')
|
own_account = self.get_plextv_user_details(output_format='xml')
|
||||||
friends_list = self.get_plextv_friends(output_format='xml')
|
friends_list = self.get_plextv_friends(output_format='xml')
|
||||||
|
@ -936,3 +944,35 @@ class PlexTV(object):
|
||||||
"user_token": helpers.get_xml_attr(a, 'authToken')
|
"user_token": helpers.get_xml_attr(a, 'authToken')
|
||||||
}
|
}
|
||||||
return account_details
|
return account_details
|
||||||
|
|
||||||
|
def get_geoip_lookup(self, ip_address=''):
|
||||||
|
if not ip_address or not helpers.is_public_ip(ip_address):
|
||||||
|
return
|
||||||
|
|
||||||
|
geoip_data = self.get_plextv_geoip(ip_address=ip_address, output_format='xml')
|
||||||
|
|
||||||
|
try:
|
||||||
|
xml_head = geoip_data.getElementsByTagName('location')
|
||||||
|
except Exception as e:
|
||||||
|
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_geoip_lookup: %s." % e)
|
||||||
|
return None
|
||||||
|
|
||||||
|
for a in xml_head:
|
||||||
|
coordinates = helpers.get_xml_attr(a, 'coordinates').split(',')
|
||||||
|
latitude = longitude = None
|
||||||
|
if len(coordinates) == 2:
|
||||||
|
latitude, longitude = [helpers.cast_to_float(c) for c in coordinates]
|
||||||
|
|
||||||
|
geo_info = {"code": helpers.get_xml_attr(a, 'code') or None,
|
||||||
|
"country": helpers.get_xml_attr(a, 'country') or None,
|
||||||
|
"region": helpers.get_xml_attr(a, 'subdivisions') or None,
|
||||||
|
"city": helpers.get_xml_attr(a, 'city') or None,
|
||||||
|
"postal_code": helpers.get_xml_attr(a, 'postal_code') or None,
|
||||||
|
"timezone": helpers.get_xml_attr(a, 'time_zone') or None,
|
||||||
|
"latitude": latitude,
|
||||||
|
"longitude": longitude,
|
||||||
|
"continent": None, # keep for backwards compatibility with GeoLite2
|
||||||
|
"accuracy": None # keep for backwards compatibility with GeoLite2
|
||||||
|
}
|
||||||
|
|
||||||
|
return geo_info
|
||||||
|
|
|
@ -2980,10 +2980,26 @@ class PmsConnect(object):
|
||||||
for a in xml_head:
|
for a in xml_head:
|
||||||
server_response = {'mapping_state': helpers.get_xml_attr(a, 'mappingState'),
|
server_response = {'mapping_state': helpers.get_xml_attr(a, 'mappingState'),
|
||||||
'mapping_error': helpers.get_xml_attr(a, 'mappingError'),
|
'mapping_error': helpers.get_xml_attr(a, 'mappingError'),
|
||||||
|
'sign_in_state': helpers.get_xml_attr(a, 'signInState'),
|
||||||
'public_address': helpers.get_xml_attr(a, 'publicAddress'),
|
'public_address': helpers.get_xml_attr(a, 'publicAddress'),
|
||||||
'public_port': helpers.get_xml_attr(a, 'publicPort')
|
'public_port': helpers.get_xml_attr(a, 'publicPort'),
|
||||||
|
'private_address': helpers.get_xml_attr(a, 'privateAddress'),
|
||||||
|
'private_port': helpers.get_xml_attr(a, 'privatePort')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if server_response['mapping_state'] == 'unknown':
|
||||||
|
server_response['reason'] = 'Plex remote access port mapping unknown'
|
||||||
|
elif server_response['mapping_state'] not in ('mapped', 'waiting'):
|
||||||
|
server_response['reason'] = 'Plex remote access port not mapped'
|
||||||
|
elif server_response['mapping_error'] == 'unreachable':
|
||||||
|
server_response['reason'] = 'Plex remote access port mapped, ' \
|
||||||
|
'but the port is unreachable from Plex.tv'
|
||||||
|
elif server_response['mapping_error'] == 'publisherror':
|
||||||
|
server_response['reason'] = 'Plex remote access port mapped, ' \
|
||||||
|
'but failed to publish the port to Plex.tv'
|
||||||
|
else:
|
||||||
|
server_response['reason'] = ''
|
||||||
|
|
||||||
return server_response
|
return server_response
|
||||||
|
|
||||||
def get_update_staus(self):
|
def get_update_staus(self):
|
||||||
|
|
|
@ -21,7 +21,6 @@ from future.builtins import str
|
||||||
from future.builtins import object
|
from future.builtins import object
|
||||||
|
|
||||||
import httpagentparser
|
import httpagentparser
|
||||||
import time
|
|
||||||
|
|
||||||
import plexpy
|
import plexpy
|
||||||
if plexpy.PYTHON2:
|
if plexpy.PYTHON2:
|
||||||
|
|
|
@ -18,4 +18,4 @@
|
||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
PLEXPY_BRANCH = "python3"
|
PLEXPY_BRANCH = "python3"
|
||||||
PLEXPY_RELEASE_VERSION = "v2.2.2-beta"
|
PLEXPY_RELEASE_VERSION = "v2.2.3-beta"
|
||||||
|
|
|
@ -147,8 +147,8 @@ def getVersion():
|
||||||
return current_version, 'origin', current_branch
|
return current_version, 'origin', current_branch
|
||||||
|
|
||||||
|
|
||||||
def check_update(auto_update=False, notify=False):
|
def check_update(scheduler=False, notify=False):
|
||||||
check_github(auto_update=auto_update, notify=notify)
|
check_github(scheduler=scheduler, notify=notify)
|
||||||
|
|
||||||
if not plexpy.CURRENT_VERSION:
|
if not plexpy.CURRENT_VERSION:
|
||||||
plexpy.UPDATE_AVAILABLE = None
|
plexpy.UPDATE_AVAILABLE = None
|
||||||
|
@ -171,7 +171,7 @@ def check_update(auto_update=False, notify=False):
|
||||||
plexpy.WIN_SYS_TRAY_ICON.update(icon=icon, hover_text=hover_text)
|
plexpy.WIN_SYS_TRAY_ICON.update(icon=icon, hover_text=hover_text)
|
||||||
|
|
||||||
|
|
||||||
def check_github(auto_update=False, notify=False):
|
def check_github(scheduler=False, notify=False):
|
||||||
plexpy.COMMITS_BEHIND = 0
|
plexpy.COMMITS_BEHIND = 0
|
||||||
|
|
||||||
if plexpy.CONFIG.GIT_TOKEN:
|
if plexpy.CONFIG.GIT_TOKEN:
|
||||||
|
@ -248,7 +248,7 @@ def check_github(auto_update=False, notify=False):
|
||||||
'plexpy_update_commit': plexpy.LATEST_VERSION,
|
'plexpy_update_commit': plexpy.LATEST_VERSION,
|
||||||
'plexpy_update_behind': plexpy.COMMITS_BEHIND})
|
'plexpy_update_behind': plexpy.COMMITS_BEHIND})
|
||||||
|
|
||||||
if auto_update and not plexpy.DOCKER:
|
if scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and not plexpy.DOCKER:
|
||||||
logger.info('Running automatic update.')
|
logger.info('Running automatic update.')
|
||||||
plexpy.shutdown(restart=True, update=True)
|
plexpy.shutdown(restart=True, update=True)
|
||||||
|
|
||||||
|
|
|
@ -1936,6 +1936,10 @@ class WebInterface(object):
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
"""
|
"""
|
||||||
|
# For backwards compatibility
|
||||||
|
if 'id' in kwargs:
|
||||||
|
row_id = kwargs['id']
|
||||||
|
|
||||||
data_factory = datafactory.DataFactory()
|
data_factory = datafactory.DataFactory()
|
||||||
stream_data = data_factory.get_stream_details(row_id, session_key)
|
stream_data = data_factory.get_stream_details(row_id, session_key)
|
||||||
|
|
||||||
|
@ -2993,6 +2997,7 @@ class WebInterface(object):
|
||||||
"notify_recently_added_delay": plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY,
|
"notify_recently_added_delay": plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY,
|
||||||
"notify_concurrent_by_ip": checked(plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP),
|
"notify_concurrent_by_ip": checked(plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP),
|
||||||
"notify_concurrent_threshold": plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD,
|
"notify_concurrent_threshold": plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD,
|
||||||
|
"notify_continued_session_threshold": plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD,
|
||||||
"home_sections": json.dumps(plexpy.CONFIG.HOME_SECTIONS),
|
"home_sections": json.dumps(plexpy.CONFIG.HOME_SECTIONS),
|
||||||
"home_stats_cards": json.dumps(plexpy.CONFIG.HOME_STATS_CARDS),
|
"home_stats_cards": json.dumps(plexpy.CONFIG.HOME_STATS_CARDS),
|
||||||
"home_library_cards": json.dumps(plexpy.CONFIG.HOME_LIBRARY_CARDS),
|
"home_library_cards": json.dumps(plexpy.CONFIG.HOME_LIBRARY_CARDS),
|
||||||
|
@ -3024,11 +3029,7 @@ class WebInterface(object):
|
||||||
"newsletter_password": plexpy.CONFIG.NEWSLETTER_PASSWORD,
|
"newsletter_password": plexpy.CONFIG.NEWSLETTER_PASSWORD,
|
||||||
"newsletter_inline_styles": checked(plexpy.CONFIG.NEWSLETTER_INLINE_STYLES),
|
"newsletter_inline_styles": checked(plexpy.CONFIG.NEWSLETTER_INLINE_STYLES),
|
||||||
"newsletter_custom_dir": plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR,
|
"newsletter_custom_dir": plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR,
|
||||||
"win_sys_tray": checked(plexpy.CONFIG.WIN_SYS_TRAY),
|
"win_sys_tray": checked(plexpy.CONFIG.WIN_SYS_TRAY)
|
||||||
"maxmind_license_key": plexpy.CONFIG.MAXMIND_LICENSE_KEY,
|
|
||||||
"geoip_db": plexpy.CONFIG.GEOIP_DB,
|
|
||||||
"geoip_db_installed": plexpy.CONFIG.GEOIP_DB_INSTALLED,
|
|
||||||
"geoip_db_update_days": plexpy.CONFIG.GEOIP_DB_UPDATE_DAYS
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return serve_template(templatename="settings.html", title="Settings", config=config, kwargs=kwargs)
|
return serve_template(templatename="settings.html", title="Settings", config=config, kwargs=kwargs)
|
||||||
|
@ -3260,36 +3261,6 @@ class WebInterface(object):
|
||||||
else:
|
else:
|
||||||
return {'result': 'error', 'message': 'Database backup failed.'}
|
return {'result': 'error', 'message': 'Database backup failed.'}
|
||||||
|
|
||||||
@cherrypy.expose
|
|
||||||
@cherrypy.tools.json_out()
|
|
||||||
@requireAuth(member_of("admin"))
|
|
||||||
@addtoapi()
|
|
||||||
def install_geoip_db(self, update=False, **kwargs):
|
|
||||||
""" Downloads and installs the GeoLite2 database """
|
|
||||||
|
|
||||||
update = helpers.bool_true(update)
|
|
||||||
|
|
||||||
result = helpers.install_geoip_db(update=update)
|
|
||||||
|
|
||||||
if result:
|
|
||||||
return {'result': 'success', 'message': 'GeoLite2 database installed successful.', 'updated': result}
|
|
||||||
else:
|
|
||||||
return {'result': 'error', 'message': 'GeoLite2 database install failed.', 'updated': 0}
|
|
||||||
|
|
||||||
@cherrypy.expose
|
|
||||||
@cherrypy.tools.json_out()
|
|
||||||
@requireAuth(member_of("admin"))
|
|
||||||
@addtoapi()
|
|
||||||
def uninstall_geoip_db(self, **kwargs):
|
|
||||||
""" Uninstalls the GeoLite2 database """
|
|
||||||
|
|
||||||
result = helpers.uninstall_geoip_db()
|
|
||||||
|
|
||||||
if result:
|
|
||||||
return {'result': 'success', 'message': 'GeoLite2 database uninstalled successfully.'}
|
|
||||||
else:
|
|
||||||
return {'result': 'error', 'message': 'GeoLite2 database uninstall failed.'}
|
|
||||||
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_out()
|
@cherrypy.tools.json_out()
|
||||||
@requireAuth(member_of("admin"))
|
@requireAuth(member_of("admin"))
|
||||||
|
@ -5807,7 +5778,7 @@ class WebInterface(object):
|
||||||
@requireAuth()
|
@requireAuth()
|
||||||
@addtoapi()
|
@addtoapi()
|
||||||
def get_geoip_lookup(self, ip_address='', **kwargs):
|
def get_geoip_lookup(self, ip_address='', **kwargs):
|
||||||
""" Get the geolocation info for an IP address. The GeoLite2 database must be installed.
|
""" Get the geolocation info for an IP address.
|
||||||
|
|
||||||
```
|
```
|
||||||
Required parameters:
|
Required parameters:
|
||||||
|
@ -5818,7 +5789,7 @@ class WebInterface(object):
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
json:
|
json:
|
||||||
{"continent": "North America",
|
{"code": 'US",
|
||||||
"country": "United States",
|
"country": "United States",
|
||||||
"region": "California",
|
"region": "California",
|
||||||
"city": "Mountain View",
|
"city": "Mountain View",
|
||||||
|
@ -5828,15 +5799,24 @@ class WebInterface(object):
|
||||||
"longitude": -122.0838,
|
"longitude": -122.0838,
|
||||||
"accuracy": 1000
|
"accuracy": 1000
|
||||||
}
|
}
|
||||||
json:
|
|
||||||
{"error": "The address 127.0.0.1 is not in the database."
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
"""
|
"""
|
||||||
geo_info = helpers.geoip_lookup(ip_address)
|
message = ''
|
||||||
if isinstance(geo_info, str):
|
if not ip_address:
|
||||||
return {'error': geo_info}
|
message = 'No IP address provided.'
|
||||||
return geo_info
|
elif not helpers.is_valid_ip(ip_address):
|
||||||
|
message = 'Invalid IP address provided: %s' % ip_address
|
||||||
|
elif not helpers.is_public_ip(ip_address):
|
||||||
|
message = 'Non-public IP address provided: %s' % ip_address
|
||||||
|
|
||||||
|
if message:
|
||||||
|
return {'result': 'error', 'message': message}
|
||||||
|
|
||||||
|
plex_tv = plextv.PlexTV()
|
||||||
|
geo_info = plex_tv.get_geoip_lookup(ip_address)
|
||||||
|
if geo_info:
|
||||||
|
return {'result': 'success', 'data': geo_info}
|
||||||
|
return {'result': 'error', 'message': 'Failed to lookup GeoIP info for address: %s' % ip_address}
|
||||||
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
@cherrypy.tools.json_out()
|
@cherrypy.tools.json_out()
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue