diff --git a/lib/geoip2/__init__.py b/lib/geoip2/__init__.py
new file mode 100644
index 00000000..590124a9
--- /dev/null
+++ b/lib/geoip2/__init__.py
@@ -0,0 +1,7 @@
+# pylint:disable=C0111
+
+__title__ = 'geoip2'
+__version__ = '2.4.0'
+__author__ = 'Gregory Oschwald'
+__license__ = 'Apache License, Version 2.0'
+__copyright__ = 'Copyright (c) 2013-2016 Maxmind, Inc.'
diff --git a/lib/geoip2/compat.py b/lib/geoip2/compat.py
new file mode 100644
index 00000000..67c5fa65
--- /dev/null
+++ b/lib/geoip2/compat.py
@@ -0,0 +1,17 @@
+"""Intended for internal use only."""
+import sys
+
+import ipaddress
+
+# pylint: skip-file
+
+if sys.version_info[0] == 2:
+ def compat_ip_address(address):
+ """Intended for internal use only."""
+ if isinstance(address, bytes):
+ address = address.decode()
+ return ipaddress.ip_address(address)
+else:
+ def compat_ip_address(address):
+ """Intended for internal use only."""
+ return ipaddress.ip_address(address)
diff --git a/lib/geoip2/database.py b/lib/geoip2/database.py
new file mode 100644
index 00000000..ed21d6d4
--- /dev/null
+++ b/lib/geoip2/database.py
@@ -0,0 +1,199 @@
+"""
+======================
+GeoIP2 Database Reader
+======================
+
+"""
+import inspect
+
+import maxminddb
+# pylint: disable=unused-import
+from maxminddb import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
+ MODE_MEMORY)
+
+import geoip2
+import geoip2.models
+import geoip2.errors
+
+
+class Reader(object):
+ """GeoIP2 database Reader object.
+
+ Instances of this class provide a reader for the GeoIP2 database format.
+ IP addresses can be looked up using the ``country`` and ``city`` methods.
+
+ The basic API for this class is the same for every database. First, you
+ create a reader object, specifying a file name. You then call the method
+ corresponding to the specific database, passing it the IP address you want
+ to look up.
+
+ If the request succeeds, the method call will return a model class for the
+ method you called. This model in turn contains multiple record classes,
+ each of which represents part of the data returned by the database. If the
+ database does not contain the requested information, the attributes on the
+ record class will have a ``None`` value.
+
+ If the address is not in the database, an
+ ``geoip2.errors.AddressNotFoundError`` exception will be thrown. If the
+ database is corrupt or invalid, a ``maxminddb.InvalidDatabaseError`` will
+ be thrown.
+
+"""
+
+ def __init__(self, filename, locales=None, mode=MODE_AUTO):
+ """Create GeoIP2 Reader.
+
+ :param filename: The path to the GeoIP2 database.
+ :param locales: This is list of locale codes. This argument will be
+ passed on to record classes to use when their name properties are
+ called. The default value is ['en'].
+
+ The order of the locales is significant. When a record class has
+ multiple names (country, city, etc.), its name property will return
+ the name in the first locale that has one.
+
+ Note that the only locale which is always present in the GeoIP2
+ data is "en". If you do not include this locale, the name property
+ may end up returning None even when the record has an English name.
+
+ Currently, the valid locale codes are:
+
+ * de -- German
+ * en -- English names may still include accented characters if that
+ is the accepted spelling in English. In other words, English does
+ not mean ASCII.
+ * es -- Spanish
+ * fr -- French
+ * ja -- Japanese
+ * pt-BR -- Brazilian Portuguese
+ * ru -- Russian
+ * zh-CN -- Simplified Chinese.
+ :param mode: The mode to open the database with. Valid mode are:
+ * MODE_MMAP_EXT - use the C extension with memory map.
+ * MODE_MMAP - read from memory map. Pure Python.
+ * MODE_FILE - read database as standard file. Pure Python.
+ * MODE_MEMORY - load database into memory. Pure Python.
+ * MODE_AUTO - try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order.
+ Default.
+
+ """
+ if locales is None:
+ locales = ['en']
+ self._db_reader = maxminddb.open_database(filename, mode)
+ self._locales = locales
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+
+ def country(self, ip_address):
+ """Get the Country object for the IP address.
+
+ :param ip_address: IPv4 or IPv6 address as a string.
+
+ :returns: :py:class:`geoip2.models.Country` object
+
+ """
+
+ return self._model_for(geoip2.models.Country, 'Country', ip_address)
+
+ def city(self, ip_address):
+ """Get the City object for the IP address.
+
+ :param ip_address: IPv4 or IPv6 address as a string.
+
+ :returns: :py:class:`geoip2.models.City` object
+
+ """
+ return self._model_for(geoip2.models.City, 'City', ip_address)
+
+ def anonymous_ip(self, ip_address):
+ """Get the AnonymousIP object for the IP address.
+
+ :param ip_address: IPv4 or IPv6 address as a string.
+
+ :returns: :py:class:`geoip2.models.AnonymousIP` object
+
+ """
+ return self._flat_model_for(geoip2.models.AnonymousIP,
+ 'GeoIP2-Anonymous-IP', ip_address)
+
+ def connection_type(self, ip_address):
+ """Get the ConnectionType object for the IP address.
+
+ :param ip_address: IPv4 or IPv6 address as a string.
+
+ :returns: :py:class:`geoip2.models.ConnectionType` object
+
+ """
+ return self._flat_model_for(geoip2.models.ConnectionType,
+ 'GeoIP2-Connection-Type', ip_address)
+
+ def domain(self, ip_address):
+ """Get the Domain object for the IP address.
+
+ :param ip_address: IPv4 or IPv6 address as a string.
+
+ :returns: :py:class:`geoip2.models.Domain` object
+
+ """
+ return self._flat_model_for(geoip2.models.Domain, 'GeoIP2-Domain',
+ ip_address)
+
+ def enterprise(self, ip_address):
+ """Get the Enterprise object for the IP address.
+
+ :param ip_address: IPv4 or IPv6 address as a string.
+
+ :returns: :py:class:`geoip2.models.Enterprise` object
+
+ """
+ return self._model_for(geoip2.models.Enterprise, 'Enterprise',
+ ip_address)
+
+ def isp(self, ip_address):
+ """Get the ISP object for the IP address.
+
+ :param ip_address: IPv4 or IPv6 address as a string.
+
+ :returns: :py:class:`geoip2.models.ISP` object
+
+ """
+ return self._flat_model_for(geoip2.models.ISP, 'GeoIP2-ISP',
+ ip_address)
+
+ def _get(self, database_type, ip_address):
+ if database_type not in self.metadata().database_type:
+ caller = inspect.stack()[2][3]
+ raise TypeError("The %s method cannot be used with the "
+ "%s database" %
+ (caller, self.metadata().database_type))
+ record = self._db_reader.get(ip_address)
+ if record is None:
+ raise geoip2.errors.AddressNotFoundError(
+ "The address %s is not in the database." % ip_address)
+ return record
+
+ def _model_for(self, model_class, types, ip_address):
+ record = self._get(types, ip_address)
+ record.setdefault('traits', {})['ip_address'] = ip_address
+ return model_class(record, locales=self._locales)
+
+ def _flat_model_for(self, model_class, types, ip_address):
+ record = self._get(types, ip_address)
+ record['ip_address'] = ip_address
+ return model_class(record)
+
+ def metadata(self):
+ """The metadata for the open database.
+
+ :returns: :py:class:`maxminddb.reader.Metadata` object
+ """
+ return self._db_reader.metadata()
+
+ def close(self):
+ """Closes the GeoIP2 database."""
+
+ self._db_reader.close()
diff --git a/lib/geoip2/errors.py b/lib/geoip2/errors.py
new file mode 100644
index 00000000..468b5858
--- /dev/null
+++ b/lib/geoip2/errors.py
@@ -0,0 +1,51 @@
+"""
+Errors
+======
+
+"""
+
+
+class GeoIP2Error(RuntimeError):
+ """There was a generic error in GeoIP2.
+
+ This class represents a generic error. It extends :py:exc:`RuntimeError`
+ and does not add any additional attributes.
+
+ """
+
+
+class AddressNotFoundError(GeoIP2Error):
+ """The address you were looking up was not found."""
+
+
+class AuthenticationError(GeoIP2Error):
+ """There was a problem authenticating the request."""
+
+
+class HTTPError(GeoIP2Error):
+ """There was an error when making your HTTP request.
+
+ This class represents an HTTP transport error. It extends
+ :py:exc:`GeoIP2Error` and adds attributes of its own.
+
+ :ivar http_status: The HTTP status code returned
+ :ivar uri: The URI queried
+
+ """
+
+ def __init__(self, message, http_status=None, uri=None):
+ super(HTTPError, self).__init__(message)
+ self.http_status = http_status
+ self.uri = uri
+
+
+class InvalidRequestError(GeoIP2Error):
+ """The request was invalid."""
+
+
+class OutOfQueriesError(GeoIP2Error):
+ """Your account is out of funds for the service queried."""
+
+
+class PermissionRequiredError(GeoIP2Error):
+ """Your account does not have permission to access this service."""
diff --git a/lib/geoip2/mixins.py b/lib/geoip2/mixins.py
new file mode 100644
index 00000000..7fb4c275
--- /dev/null
+++ b/lib/geoip2/mixins.py
@@ -0,0 +1,16 @@
+"""This package contains utility mixins"""
+# pylint: disable=too-few-public-methods
+from abc import ABCMeta
+
+
+class SimpleEquality(object):
+ """Naive __dict__ equality mixin"""
+
+ __metaclass__ = ABCMeta
+
+ def __eq__(self, other):
+ return (isinstance(other, self.__class__) and
+ self.__dict__ == other.__dict__)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
diff --git a/lib/geoip2/models.py b/lib/geoip2/models.py
new file mode 100644
index 00000000..15e951b0
--- /dev/null
+++ b/lib/geoip2/models.py
@@ -0,0 +1,472 @@
+"""
+Models
+======
+
+These classes provide models for the data returned by the GeoIP2
+web service and databases.
+
+The only difference between the City and Insights model classes is which
+fields in each record may be populated. See
+http://dev.maxmind.com/geoip/geoip2/web-services for more details.
+
+"""
+# pylint: disable=too-many-instance-attributes,too-few-public-methods
+from abc import ABCMeta
+
+import geoip2.records
+from geoip2.mixins import SimpleEquality
+
+
+class Country(SimpleEquality):
+ """Model for the GeoIP2 Precision: Country and the GeoIP2 Country database.
+
+ This class provides the following attributes:
+
+ .. attribute:: continent
+
+ Continent object for the requested IP address.
+
+ :type: :py:class:`geoip2.records.Continent`
+
+ .. attribute:: country
+
+ Country object for the requested IP address. This record represents the
+ country where MaxMind believes the IP is located.
+
+ :type: :py:class:`geoip2.records.Country`
+
+ .. attribute:: maxmind
+
+ Information related to your MaxMind account.
+
+ :type: :py:class:`geoip2.records.MaxMind`
+
+ .. attribute:: registered_country
+
+ The registered country object for the requested IP address. This record
+ represents the country where the ISP has registered a given IP block in
+ and may differ from the user's country.
+
+ :type: :py:class:`geoip2.records.Country`
+
+ .. attribute:: represented_country
+
+ Object for the country represented by the users of the IP address
+ when that country is different than the country in ``country``. For
+ instance, the country represented by an overseas military base.
+
+ :type: :py:class:`geoip2.records.RepresentedCountry`
+
+ .. attribute:: traits
+
+ Object with the traits of the requested IP address.
+
+ :type: :py:class:`geoip2.records.Traits`
+
+ """
+
+ def __init__(self, raw_response, locales=None):
+ if locales is None:
+ locales = ['en']
+ self._locales = locales
+ self.continent = \
+ geoip2.records.Continent(locales,
+ **raw_response.get('continent', {}))
+ self.country = \
+ geoip2.records.Country(locales,
+ **raw_response.get('country', {}))
+ self.registered_country = \
+ geoip2.records.Country(locales,
+ **raw_response.get('registered_country',
+ {}))
+ self.represented_country \
+ = geoip2.records.RepresentedCountry(locales,
+ **raw_response.get(
+ 'represented_country', {}))
+
+ self.maxmind = \
+ geoip2.records.MaxMind(**raw_response.get('maxmind', {}))
+
+ self.traits = geoip2.records.Traits(**raw_response.get('traits', {}))
+ self.raw = raw_response
+
+ def __repr__(self):
+ return '{module}.{class_name}({data}, {locales})'.format(
+ module=self.__module__,
+ class_name=self.__class__.__name__,
+ data=self.raw,
+ locales=self._locales)
+
+
+class City(Country):
+ """Model for the GeoIP2 Precision: City and the GeoIP2 City database.
+
+ .. attribute:: city
+
+ City object for the requested IP address.
+
+ :type: :py:class:`geoip2.records.City`
+
+ .. attribute:: continent
+
+ Continent object for the requested IP address.
+
+ :type: :py:class:`geoip2.records.Continent`
+
+ .. attribute:: country
+
+ Country object for the requested IP address. This record represents the
+ country where MaxMind believes the IP is located.
+
+ :type: :py:class:`geoip2.records.Country`
+
+ .. attribute:: location
+
+ Location object for the requested IP address.
+
+ .. attribute:: maxmind
+
+ Information related to your MaxMind account.
+
+ :type: :py:class:`geoip2.records.MaxMind`
+
+ .. attribute:: registered_country
+
+ The registered country object for the requested IP address. This record
+ represents the country where the ISP has registered a given IP block in
+ and may differ from the user's country.
+
+ :type: :py:class:`geoip2.records.Country`
+
+ .. attribute:: represented_country
+
+ Object for the country represented by the users of the IP address
+ when that country is different than the country in ``country``. For
+ instance, the country represented by an overseas military base.
+
+ :type: :py:class:`geoip2.records.RepresentedCountry`
+
+ .. attribute:: subdivisions
+
+ Object (tuple) representing the subdivisions of the country to which
+ the location of the requested IP address belongs.
+
+ :type: :py:class:`geoip2.records.Subdivisions`
+
+ .. attribute:: traits
+
+ Object with the traits of the requested IP address.
+
+ :type: :py:class:`geoip2.records.Traits`
+
+ """
+
+ def __init__(self, raw_response, locales=None):
+ super(City, self).__init__(raw_response, locales)
+ self.city = \
+ geoip2.records.City(locales, **raw_response.get('city', {}))
+ self.location = \
+ geoip2.records.Location(**raw_response.get('location', {}))
+ self.postal = \
+ geoip2.records.Postal(**raw_response.get('postal', {}))
+ self.subdivisions = \
+ geoip2.records.Subdivisions(locales,
+ *raw_response.get('subdivisions', []))
+
+
+class Insights(City):
+ """Model for the GeoIP2 Precision: Insights web service endpoint.
+
+ .. attribute:: city
+
+ City object for the requested IP address.
+
+ :type: :py:class:`geoip2.records.City`
+
+ .. attribute:: continent
+
+ Continent object for the requested IP address.
+
+ :type: :py:class:`geoip2.records.Continent`
+
+ .. attribute:: country
+
+ Country object for the requested IP address. This record represents the
+ country where MaxMind believes the IP is located.
+
+ :type: :py:class:`geoip2.records.Country`
+
+ .. attribute:: location
+
+ Location object for the requested IP address.
+
+ .. attribute:: maxmind
+
+ Information related to your MaxMind account.
+
+ :type: :py:class:`geoip2.records.MaxMind`
+
+ .. attribute:: registered_country
+
+ The registered country object for the requested IP address. This record
+ represents the country where the ISP has registered a given IP block in
+ and may differ from the user's country.
+
+ :type: :py:class:`geoip2.records.Country`
+
+ .. attribute:: represented_country
+
+ Object for the country represented by the users of the IP address
+ when that country is different than the country in ``country``. For
+ instance, the country represented by an overseas military base.
+
+ :type: :py:class:`geoip2.records.RepresentedCountry`
+
+ .. attribute:: subdivisions
+
+ Object (tuple) representing the subdivisions of the country to which
+ the location of the requested IP address belongs.
+
+ :type: :py:class:`geoip2.records.Subdivisions`
+
+ .. attribute:: traits
+
+ Object with the traits of the requested IP address.
+
+ :type: :py:class:`geoip2.records.Traits`
+
+ """
+
+
+class Enterprise(City):
+ """Model for the GeoIP2 Enterprise database.
+
+ .. attribute:: city
+
+ City object for the requested IP address.
+
+ :type: :py:class:`geoip2.records.City`
+
+ .. attribute:: continent
+
+ Continent object for the requested IP address.
+
+ :type: :py:class:`geoip2.records.Continent`
+
+ .. attribute:: country
+
+ Country object for the requested IP address. This record represents the
+ country where MaxMind believes the IP is located.
+
+ :type: :py:class:`geoip2.records.Country`
+
+ .. attribute:: location
+
+ Location object for the requested IP address.
+
+ .. attribute:: maxmind
+
+ Information related to your MaxMind account.
+
+ :type: :py:class:`geoip2.records.MaxMind`
+
+ .. attribute:: registered_country
+
+ The registered country object for the requested IP address. This record
+ represents the country where the ISP has registered a given IP block in
+ and may differ from the user's country.
+
+ :type: :py:class:`geoip2.records.Country`
+
+ .. attribute:: represented_country
+
+ Object for the country represented by the users of the IP address
+ when that country is different than the country in ``country``. For
+ instance, the country represented by an overseas military base.
+
+ :type: :py:class:`geoip2.records.RepresentedCountry`
+
+ .. attribute:: subdivisions
+
+ Object (tuple) representing the subdivisions of the country to which
+ the location of the requested IP address belongs.
+
+ :type: :py:class:`geoip2.records.Subdivisions`
+
+ .. attribute:: traits
+
+ Object with the traits of the requested IP address.
+
+ :type: :py:class:`geoip2.records.Traits`
+
+ """
+
+
+class SimpleModel(SimpleEquality):
+ """Provides basic methods for non-location models"""
+
+ __metaclass__ = ABCMeta
+
+ def __repr__(self):
+ # pylint: disable=no-member
+ return '{module}.{class_name}({data})'.format(
+ module=self.__module__,
+ class_name=self.__class__.__name__,
+ data=str(self.raw))
+
+
+class AnonymousIP(SimpleModel):
+ """Model class for the GeoIP2 Anonymous IP.
+
+ This class provides the following attribute:
+
+ .. attribute:: is_anonymous
+
+ This is true if the IP address belongs to any sort of anonymous network.
+
+ :type: bool
+
+ .. attribute:: is_anonymous_vpn
+
+ This is true if the IP address belongs to an anonymous VPN system.
+
+ :type: bool
+
+ .. attribute:: is_hosting_provider
+
+ This is true if the IP address belongs to a hosting provider.
+
+ :type: bool
+
+ .. attribute:: is_public_proxy
+
+ This is true if the IP address belongs to a public proxy.
+
+ :type: bool
+
+ .. attribute:: is_tor_exit_node
+
+ This is true if the IP address is a Tor exit node.
+
+ :type: bool
+
+ .. attribute:: ip_address
+
+ The IP address used in the lookup.
+
+ :type: unicode
+ """
+
+ def __init__(self, raw):
+ self.is_anonymous = raw.get('is_anonymous', False)
+ self.is_anonymous_vpn = raw.get('is_anonymous_vpn', False)
+ self.is_hosting_provider = raw.get('is_hosting_provider', False)
+ self.is_public_proxy = raw.get('is_public_proxy', False)
+ self.is_tor_exit_node = raw.get('is_tor_exit_node', False)
+
+ self.ip_address = raw.get('ip_address')
+ self.raw = raw
+
+
+class ConnectionType(SimpleModel):
+ """Model class for the GeoIP2 Connection-Type.
+
+ This class provides the following attribute:
+
+ .. attribute:: connection_type
+
+ The connection type may take the following values:
+
+ - Dialup
+ - Cable/DSL
+ - Corporate
+ - Cellular
+
+ Additional values may be added in the future.
+
+ :type: unicode
+
+ .. attribute:: ip_address
+
+ The IP address used in the lookup.
+
+ :type: unicode
+ """
+
+ def __init__(self, raw):
+ self.connection_type = raw.get('connection_type')
+ self.ip_address = raw.get('ip_address')
+ self.raw = raw
+
+
+class Domain(SimpleModel):
+ """Model class for the GeoIP2 Domain.
+
+ This class provides the following attribute:
+
+ .. attribute:: domain
+
+ The domain associated with the IP address.
+
+ :type: unicode
+
+ .. attribute:: ip_address
+
+ The IP address used in the lookup.
+
+ :type: unicode
+
+ """
+
+ def __init__(self, raw):
+ self.domain = raw.get('domain')
+ self.ip_address = raw.get('ip_address')
+ self.raw = raw
+
+
+class ISP(SimpleModel):
+ """Model class for the GeoIP2 ISP.
+
+ This class provides the following attribute:
+
+ .. attribute:: autonomous_system_number
+
+ The autonomous system number associated with the IP address.
+
+ :type: int
+
+ .. attribute:: autonomous_system_organization
+
+ The organization associated with the registered autonomous system number
+ for the IP address.
+
+ :type: unicode
+
+ .. attribute:: isp
+
+ The name of the ISP associated with the IP address.
+
+ :type: unicode
+
+ .. attribute:: organization
+
+ The name of the organization associated with the IP address.
+
+ :type: unicode
+
+ .. attribute:: ip_address
+
+ The IP address used in the lookup.
+
+ :type: unicode
+ """
+
+ # pylint:disable=too-many-arguments
+ def __init__(self, raw):
+ self.autonomous_system_number = raw.get('autonomous_system_number')
+ self.autonomous_system_organization = raw.get(
+ 'autonomous_system_organization')
+ self.isp = raw.get('isp')
+ self.organization = raw.get('organization')
+ self.ip_address = raw.get('ip_address')
+ self.raw = raw
diff --git a/lib/geoip2/records.py b/lib/geoip2/records.py
new file mode 100644
index 00000000..7f99d121
--- /dev/null
+++ b/lib/geoip2/records.py
@@ -0,0 +1,605 @@
+"""
+
+Records
+=======
+
+"""
+
+# pylint:disable=R0903
+from abc import ABCMeta
+
+from geoip2.mixins import SimpleEquality
+
+
+class Record(SimpleEquality):
+ """All records are subclasses of the abstract class ``Record``."""
+
+ __metaclass__ = ABCMeta
+
+ _valid_attributes = set()
+
+ def __init__(self, **kwargs):
+ valid_args = dict((k, kwargs.get(k)) for k in self._valid_attributes)
+ self.__dict__.update(valid_args)
+
+ def __setattr__(self, name, value):
+ raise AttributeError("can't set attribute")
+
+ def __repr__(self):
+ args = ', '.join('%s=%r' % x for x in self.__dict__.items())
+ return '{module}.{class_name}({data})'.format(
+ module=self.__module__,
+ class_name=self.__class__.__name__,
+ data=args)
+
+
+class PlaceRecord(Record):
+ """All records with :py:attr:`names` subclass :py:class:`PlaceRecord`."""
+
+ __metaclass__ = ABCMeta
+
+ def __init__(self, locales=None, **kwargs):
+ if locales is None:
+ locales = ['en']
+ if kwargs.get('names') is None:
+ kwargs['names'] = {}
+ object.__setattr__(self, '_locales', locales)
+ super(PlaceRecord, self).__init__(**kwargs)
+
+ @property
+ def name(self):
+ """Dict with locale codes as keys and localized name as value."""
+ # pylint:disable=E1101
+ return next(
+ (self.names.get(x) for x in self._locales
+ if x in self.names), None)
+
+
+class City(PlaceRecord):
+ """Contains data for the city record associated with an IP address.
+
+ This class contains the city-level data associated with an IP address.
+
+ This record is returned by ``city``, ``enterprise``, and ``insights``.
+
+ Attributes:
+
+ .. attribute:: confidence
+
+ A value from 0-100 indicating MaxMind's
+ confidence that the city is correct. This attribute is only available
+ from the Insights end point and the GeoIP2 Enterprise database.
+
+ :type: int
+
+ .. attribute:: geoname_id
+
+ The GeoName ID for the city.
+
+ :type: int
+
+ .. attribute:: name
+
+ The name of the city based on the locales list passed to the
+ constructor.
+
+ :type: unicode
+
+ .. attribute:: names
+
+ A dictionary where the keys are locale codes
+ and the values are names.
+
+ :type: dict
+
+ """
+
+ _valid_attributes = set(['confidence', 'geoname_id', 'names'])
+
+
+class Continent(PlaceRecord):
+ """Contains data for the continent record associated with an IP address.
+
+ This class contains the continent-level data associated with an IP
+ address.
+
+ Attributes:
+
+
+ .. attribute:: code
+
+ A two character continent code like "NA" (North America)
+ or "OC" (Oceania).
+
+ :type: unicode
+
+ .. attribute:: geoname_id
+
+ The GeoName ID for the continent.
+
+ :type: int
+
+ .. attribute:: name
+
+ Returns the name of the continent based on the locales list passed to
+ the constructor.
+
+ :type: unicode
+
+ .. attribute:: names
+
+ A dictionary where the keys are locale codes
+ and the values are names.
+
+ :type: dict
+
+ """
+
+ _valid_attributes = set(['code', 'geoname_id', 'names'])
+
+
+class Country(PlaceRecord):
+ """Contains data for the country record associated with an IP address.
+
+ This class contains the country-level data associated with an IP address.
+
+ Attributes:
+
+
+ .. attribute:: confidence
+
+ A value from 0-100 indicating MaxMind's confidence that
+ the country is correct. This attribute is only available from the
+ Insights end point and the GeoIP2 Enterprise database.
+
+ :type: int
+
+ .. attribute:: geoname_id
+
+ The GeoName ID for the country.
+
+ :type: int
+
+ .. attribute:: iso_code
+
+      The two-character `ISO 3166-1
+      <http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the
+      country.
+
+ :type: unicode
+
+ .. attribute:: name
+
+ The name of the country based on the locales list passed to the
+ constructor.
+
+ :type: unicode
+
+ .. attribute:: names
+
+ A dictionary where the keys are locale codes and the values
+ are names.
+
+ :type: dict
+
+ """
+
+ _valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
+
+
+class RepresentedCountry(Country):
+ """Contains data for the represented country associated with an IP address.
+
+ This class contains the country-level data associated with an IP address
+ for the IP's represented country. The represented country is the country
+ represented by something like a military base.
+
+ Attributes:
+
+
+ .. attribute:: confidence
+
+ A value from 0-100 indicating MaxMind's confidence that
+ the country is correct. This attribute is only available from the
+ Insights end point and the GeoIP2 Enterprise database.
+
+ :type: int
+
+ .. attribute:: geoname_id
+
+ The GeoName ID for the country.
+
+ :type: int
+
+ .. attribute:: iso_code
+
+      The two-character `ISO 3166-1
+      <http://en.wikipedia.org/wiki/ISO_3166-1>`_ alpha code for the country.
+
+ :type: unicode
+
+ .. attribute:: name
+
+ The name of the country based on the locales list passed to the
+ constructor.
+
+ :type: unicode
+
+ .. attribute:: names
+
+ A dictionary where the keys are locale codes and the values
+ are names.
+
+ :type: dict
+
+
+ .. attribute:: type
+
+ A string indicating the type of entity that is representing the
+ country. Currently we only return ``military`` but this could expand to
+ include other types in the future.
+
+ :type: unicode
+
+ """
+
+ _valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names',
+ 'type'])
+
+
+class Location(Record):
+ """Contains data for the location record associated with an IP address.
+
+ This class contains the location data associated with an IP address.
+
+ This record is returned by ``city``, ``enterprise``, and ``insights``.
+
+ Attributes:
+
+ .. attribute:: average_income
+
+ The average income in US dollars associated with the requested IP
+ address. This attribute is only available from the Insights end point.
+
+ :type: int
+
+ .. attribute:: accuracy_radius
+
+ The radius in kilometers around the specified location where the IP
+ address is likely to be.
+
+ :type: int
+
+ .. attribute:: latitude
+
+ The approximate latitude of the location associated with the IP
+ address. This value is not precise and should not be used to identify a
+ particular address or household.
+
+ :type: float
+
+ .. attribute:: longitude
+
+ The approximate longitude of the location associated with the IP
+ address. This value is not precise and should not be used to identify a
+ particular address or household.
+
+ :type: float
+
+ .. attribute:: metro_code
+
+ The metro code of the location if the
+ location is in the US. MaxMind returns the same metro codes as the
+ `Google AdWords API
+      <https://developers.google.com/adwords/api/docs/appendix/cities-DMAregions>`_.
+
+ :type: int
+
+ .. attribute:: population_density
+
+ The estimated population per square kilometer associated with the IP
+ address. This attribute is only available from the Insights end point.
+
+ :type: int
+
+ .. attribute:: time_zone
+
+ The time zone associated with location, as specified by the `IANA Time
+      Zone Database <http://www.iana.org/time-zones>`_, e.g.,
+ "America/New_York".
+
+ :type: unicode
+
+ """
+
+ _valid_attributes = set(['average_income', 'accuracy_radius', 'latitude',
+ 'longitude', 'metro_code', 'population_density',
+ 'postal_code', 'postal_confidence', 'time_zone'])
+
+
+class MaxMind(Record):
+ """Contains data related to your MaxMind account.
+
+ Attributes:
+
+ .. attribute:: queries_remaining
+
+ The number of remaining queries you have
+ for the end point you are calling.
+
+ :type: int
+
+ """
+
+ _valid_attributes = set(['queries_remaining'])
+
+
+class Postal(Record):
+ """Contains data for the postal record associated with an IP address.
+
+ This class contains the postal data associated with an IP address.
+
+ This attribute is returned by ``city``, ``enterprise``, and ``insights``.
+
+ Attributes:
+
+ .. attribute:: code
+
+ The postal code of the location. Postal
+ codes are not available for all countries. In some countries, this will
+ only contain part of the postal code.
+
+ :type: unicode
+
+ .. attribute:: confidence
+
+ A value from 0-100 indicating
+ MaxMind's confidence that the postal code is correct. This attribute is
+ only available from the Insights end point and the GeoIP2 Enterprise
+ database.
+
+ :type: int
+
+ """
+
+ _valid_attributes = set(['code', 'confidence'])
+
+
+class Subdivision(PlaceRecord):
+ """Contains data for the subdivisions associated with an IP address.
+
+ This class contains the subdivision data associated with an IP address.
+
+ This attribute is returned by ``city``, ``enterprise``, and ``insights``.
+
+ Attributes:
+
+ .. attribute:: confidence
+
+ This is a value from 0-100 indicating MaxMind's
+ confidence that the subdivision is correct. This attribute is only
+ available from the Insights end point and the GeoIP2 Enterprise
+ database.
+
+ :type: int
+
+ .. attribute:: geoname_id
+
+ This is a GeoName ID for the subdivision.
+
+ :type: int
+
+ .. attribute:: iso_code
+
+ This is a string up to three characters long
+ contain the subdivision portion of the `ISO 3166-2 code
+      <http://en.wikipedia.org/wiki/ISO_3166-2>`_.
+
+ :type: unicode
+
+ .. attribute:: name
+
+ The name of the subdivision based on the locales list passed to the
+ constructor.
+
+ :type: unicode
+
+ .. attribute:: names
+
+ A dictionary where the keys are locale codes and the
+ values are names
+
+ :type: dict
+
+ """
+
+ _valid_attributes = set(['confidence', 'geoname_id', 'iso_code', 'names'])
+
+
+class Subdivisions(tuple):
+ """A tuple-like collection of subdivisions associated with an IP address.
+
+ This class contains the subdivisions of the country associated with the
+ IP address from largest to smallest.
+
+ For instance, the response for Oxford in the United Kingdom would have
+ England as the first element and Oxfordshire as the second element.
+
+ This attribute is returned by ``city``, ``enterprise``, and ``insights``.
+ """
+
+ def __new__(cls, locales, *subdivisions):
+ subdivisions = [Subdivision(locales, **x) for x in subdivisions]
+ obj = super(cls, Subdivisions).__new__(cls, subdivisions)
+ return obj
+
+ def __init__(self, locales, *subdivisions): # pylint:disable=W0613
+ self._locales = locales
+ super(Subdivisions, self).__init__()
+
+ @property
+ def most_specific(self):
+ """The most specific (smallest) subdivision available.
+
+ If there are no :py:class:`Subdivision` objects for the response,
+ this returns an empty :py:class:`Subdivision`.
+
+ :type: :py:class:`Subdivision`
+ """
+ try:
+ return self[-1]
+ except IndexError:
+ return Subdivision(self._locales)
+
+
+class Traits(Record):
+ """Contains data for the traits record associated with an IP address.
+
+ This class contains the traits data associated with an IP address.
+
+ This class has the following attributes:
+
+
+ .. attribute:: autonomous_system_number
+
+ The `autonomous system
+      number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_
+ associated with the IP address. This attribute is only available from
+ the City and Insights web service end points and the GeoIP2 Enterprise
+ database.
+
+ :type: int
+
+ .. attribute:: autonomous_system_organization
+
+ The organization associated with the registered `autonomous system
+      number <http://en.wikipedia.org/wiki/Autonomous_system_(Internet)>`_ for
+ the IP address. This attribute is only available from the City and
+ Insights web service end points and the GeoIP2 Enterprise database.
+
+ :type: unicode
+
+ .. attribute:: connection_type
+
+ The connection type may take the following values:
+
+ - Dialup
+ - Cable/DSL
+ - Corporate
+ - Cellular
+
+ Additional values may be added in the future.
+
+ This attribute is only available in the GeoIP2 Enterprise database.
+
+ :type: unicode
+
+ .. attribute:: domain
+
+ The second level domain associated with the
+ IP address. This will be something like "example.com" or
+ "example.co.uk", not "foo.example.com". This attribute is only available
+ from the City and Insights web service end points and the GeoIP2
+ Enterprise database.
+
+ :type: unicode
+
+ .. attribute:: ip_address
+
+ The IP address that the data in the model
+ is for. If you performed a "me" lookup against the web service, this
+ will be the externally routable IP address for the system the code is
+ running on. If the system is behind a NAT, this may differ from the IP
+ address locally assigned to it.
+
+ :type: unicode
+
+ .. attribute:: is_anonymous_proxy
+
+ This is true if the IP is an anonymous
+ proxy. See http://dev.maxmind.com/faq/geoip#anonproxy for further
+ details.
+
+ :type: bool
+
+ .. deprecated:: 2.2.0
+       Use our `GeoIP2 Anonymous IP database
+       <https://www.maxmind.com/en/geoip2-anonymous-ip-database>`_
+ instead.
+
+ .. attribute:: is_legitimate_proxy
+
+ This attribute is true if MaxMind believes this IP address to be a
+ legitimate proxy, such as an internal VPN used by a corporation. This
+ attribute is only available in the GeoIP2 Enterprise database.
+
+ :type: bool
+
+ .. attribute:: is_satellite_provider
+
+ This is true if the IP address is from a satellite provider that
+ provides service to multiple countries.
+
+ :type: bool
+
+ .. deprecated:: 2.2.0
+ Due to the increased coverage by mobile carriers, very few
+ satellite providers now serve multiple countries. As a result, the
+ output does not provide sufficiently relevant data for us to maintain
+ it.
+
+ .. attribute:: isp
+
+ The name of the ISP associated with the IP address. This attribute is
+ only available from the City and Insights web service end points and the
+ GeoIP2 Enterprise database.
+
+ :type: unicode
+
+ .. attribute:: organization
+
+ The name of the organization associated with the IP address. This
+ attribute is only available from the City and Insights web service end
+ points and the GeoIP2 Enterprise database.
+
+ :type: unicode
+
+ .. attribute:: user_type
+
+ The user type associated with the IP
+ address. This can be one of the following values:
+
+ * business
+ * cafe
+ * cellular
+ * college
+ * content_delivery_network
+ * dialup
+ * government
+ * hosting
+ * library
+ * military
+ * residential
+ * router
+ * school
+ * search_engine_spider
+ * traveler
+
+ This attribute is only available from the Insights end point and the
+ GeoIP2 Enterprise database.
+
+ :type: unicode
+
+ """
+
+ _valid_attributes = set(
+ ['autonomous_system_number', 'autonomous_system_organization',
+ 'connection_type', 'domain', 'is_anonymous_proxy',
+ 'is_legitimate_proxy', 'is_satellite_provider', 'isp', 'ip_address',
+ 'organization', 'user_type'])
+
+ def __init__(self, **kwargs):
+ for k in ['is_anonymous_proxy', 'is_legitimate_proxy',
+ 'is_satellite_provider']:
+ kwargs[k] = bool(kwargs.get(k, False))
+ super(Traits, self).__init__(**kwargs)
diff --git a/lib/geoip2/webservice.py b/lib/geoip2/webservice.py
new file mode 100644
index 00000000..c64f1b80
--- /dev/null
+++ b/lib/geoip2/webservice.py
@@ -0,0 +1,219 @@
+"""
+============================
+WebServices Client API
+============================
+
This module provides a client API for all the GeoIP2 Precision web service end
+points. The end points are Country, City, and Insights. Each end point returns
+a different set of data about an IP address, with Country returning the least
+data and Insights the most.
+
+Each web service end point is represented by a different model class, and
+these model classes in turn contain multiple record classes. The record
+classes have attributes which contain data about the IP address.
+
+If the web service does not return a particular piece of data for an IP
+address, the associated attribute is not populated.
+
+The web service may not return any information for an entire record, in which
+case all of the attributes for that record class will be empty.
+
+SSL
+---
+
+Requests to the GeoIP2 Precision web service are always made with SSL.
+
+"""
+
+import requests
+
+from requests.utils import default_user_agent
+
+import geoip2
+import geoip2.models
+
+from .compat import compat_ip_address
+
+from .errors import (AddressNotFoundError, AuthenticationError, GeoIP2Error,
+ HTTPError, InvalidRequestError, OutOfQueriesError,
+ PermissionRequiredError)
+
+
class Client(object):
    """Creates a new client object.

    It accepts the following required arguments:

    :param user_id: Your MaxMind User ID.
    :param license_key: Your MaxMind license key.

    Go to https://www.maxmind.com/en/my_license_key to see your MaxMind
    User ID and license key.

    The following keyword arguments are also accepted:

    :param host: The hostname to make a request against. This defaults to
      "geoip.maxmind.com". In most cases, you should not need to set this
      explicitly.
    :param locales: This is list of locale codes. This argument will be
      passed on to record classes to use when their name properties are
      called. The default value is ['en'].

      The order of the locales is significant. When a record class has
      multiple names (country, city, etc.), its name property will return
      the name in the first locale that has one.

      Note that the only locale which is always present in the GeoIP2
      data is "en". If you do not include this locale, the name property
      may end up returning None even when the record has an English name.

      Currently, the valid locale codes are:

      * de -- German
      * en -- English names may still include accented characters if that is
        the accepted spelling in English. In other words, English does not
        mean ASCII.
      * es -- Spanish
      * fr -- French
      * ja -- Japanese
      * pt-BR -- Brazilian Portuguese
      * ru -- Russian
      * zh-CN -- Simplified Chinese.
    :param timeout: The number of seconds to wait for a response from the
      web service before giving up. Defaults to None, meaning no timeout
      is applied to the request.

    """

    def __init__(self,
                 user_id,
                 license_key,
                 host='geoip.maxmind.com',
                 locales=None,
                 timeout=None):
        """Construct a Client."""
        # pylint: disable=too-many-arguments
        if locales is None:
            locales = ['en']
        self._locales = locales
        self._user_id = user_id
        self._license_key = license_key
        self._base_uri = 'https://%s/geoip/v2.1' % host
        self._timeout = timeout

    def city(self, ip_address='me'):
        """Call GeoIP2 Precision City endpoint with the specified IP.

        :param ip_address: IPv4 or IPv6 address as a string. If no
          address is provided, the address that the web service is
          called from will be used.

        :returns: :py:class:`geoip2.models.City` object

        """
        return self._response_for('city', geoip2.models.City, ip_address)

    def country(self, ip_address='me'):
        """Call the GeoIP2 Country endpoint with the specified IP.

        :param ip_address: IPv4 or IPv6 address as a string. If no address
          is provided, the address that the web service is called from will
          be used.

        :returns: :py:class:`geoip2.models.Country` object

        """
        return self._response_for('country', geoip2.models.Country, ip_address)

    def insights(self, ip_address='me'):
        """Call the GeoIP2 Precision: Insights endpoint with the specified IP.

        :param ip_address: IPv4 or IPv6 address as a string. If no address
          is provided, the address that the web service is called from will
          be used.

        :returns: :py:class:`geoip2.models.Insights` object

        """
        return self._response_for('insights', geoip2.models.Insights,
                                  ip_address)

    def _response_for(self, path, model_class, ip_address):
        """Fetch *path* for *ip_address* and build a *model_class* instance.

        Raises a subclass of :py:class:`GeoIP2Error` on any non-200
        response (every branch of ``_handle_error`` raises).
        """
        if ip_address != 'me':
            # Validate and normalize the address before building the URI;
            # raises ValueError for malformed input.
            ip_address = str(compat_ip_address(ip_address))
        uri = '/'.join([self._base_uri, path, ip_address])
        response = requests.get(uri,
                                auth=(self._user_id, self._license_key),
                                headers={'Accept': 'application/json',
                                         'User-Agent': self._user_agent()},
                                timeout=self._timeout)
        if response.status_code == 200:
            body = self._handle_success(response, uri)
            return model_class(body, locales=self._locales)
        else:
            self._handle_error(response, uri)

    def _user_agent(self):
        """Return the User-Agent header value for outgoing requests."""
        return 'GeoIP2 Python Client v%s (%s)' % (geoip2.__version__,
                                                  default_user_agent())

    def _handle_success(self, response, uri):
        """Decode a 200 response as JSON, raising GeoIP2Error on bad JSON."""
        try:
            return response.json()
        except ValueError as ex:
            raise GeoIP2Error('Received a 200 response for %(uri)s'
                              ' but could not decode the response as '
                              'JSON: ' % locals() + ', '.join(ex.args), 200,
                              uri)

    def _handle_error(self, response, uri):
        """Dispatch a non-200 response to the appropriate error handler."""
        status = response.status_code

        if 400 <= status < 500:
            self._handle_4xx_status(response, status, uri)
        elif 500 <= status < 600:
            self._handle_5xx_status(status, uri)
        else:
            self._handle_non_200_status(status, uri)

    def _handle_4xx_status(self, response, status, uri):
        """Raise the most specific error possible for a 4xx response."""
        if not response.content:
            raise HTTPError('Received a %(status)i error for %(uri)s '
                            'with no body.' % locals(), status, uri)
        # Use .get() so a response lacking a Content-Type header raises a
        # descriptive HTTPError below instead of an unexpected KeyError.
        elif response.headers.get('Content-Type', '').find('json') == -1:
            raise HTTPError('Received a %i for %s with the following '
                            'body: %s' % (status, uri, response.content),
                            status, uri)
        try:
            body = response.json()
        except ValueError as ex:
            raise HTTPError(
                'Received a %(status)i error for %(uri)s but it did'
                ' not include the expected JSON body: ' % locals() +
                ', '.join(ex.args), status, uri)
        else:
            if 'code' in body and 'error' in body:
                self._handle_web_service_error(
                    body.get('error'), body.get('code'), status, uri)
            else:
                raise HTTPError(
                    'Response contains JSON but it does not specify '
                    'code or error keys', status, uri)

    def _handle_web_service_error(self, message, code, status, uri):
        """Map a MaxMind web-service error code to a typed exception."""
        if code in ('IP_ADDRESS_NOT_FOUND', 'IP_ADDRESS_RESERVED'):
            raise AddressNotFoundError(message)
        elif code in ('AUTHORIZATION_INVALID', 'LICENSE_KEY_REQUIRED',
                      'USER_ID_REQUIRED', 'USER_ID_UNKNOWN'):
            raise AuthenticationError(message)
        elif code in ('INSUFFICIENT_FUNDS', 'OUT_OF_QUERIES'):
            raise OutOfQueriesError(message)
        elif code == 'PERMISSION_REQUIRED':
            raise PermissionRequiredError(message)

        raise InvalidRequestError(message, code, status, uri)

    def _handle_5xx_status(self, status, uri):
        """Raise HTTPError for a server-side (5xx) failure."""
        raise HTTPError('Received a server error (%(status)i) for '
                        '%(uri)s' % locals(), status, uri)

    def _handle_non_200_status(self, status, uri):
        """Raise HTTPError for a status outside the 2xx/4xx/5xx ranges."""
        raise HTTPError('Received a very surprising HTTP status '
                        '(%(status)i) for %(uri)s' % locals(), status, uri)
diff --git a/lib/ipaddress.py b/lib/ipaddress.py
new file mode 100644
index 00000000..7657fc8f
--- /dev/null
+++ b/lib/ipaddress.py
@@ -0,0 +1,2417 @@
+# Copyright 2007 Google Inc.
+# Licensed to PSF under a Contributor Agreement.
+
+"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
+
+This library is used to create/poke/manipulate IPv4 and IPv6 addresses
+and networks.
+
+"""
+
+from __future__ import unicode_literals
+
+
+import itertools
+import struct
+
+__version__ = '1.0.16'
+
# Compatibility functions
# Integer types differ between Python 2 (int + long) and Python 3 (int only).
_compat_int_types = (int,)
try:
    _compat_int_types = (int, long)
except NameError:
    pass
# Text type: ``unicode`` on Python 2, ``str`` on Python 3.
try:
    _compat_str = unicode
except NameError:
    _compat_str = str
    assert bytes != str
# On Python 3 indexing bytes yields ints already; on Python 2 it yields
# 1-char strings that must be unpacked to their ordinal values.
if b'\0'[0] == 0: # Python 3 semantics
    def _compat_bytes_to_byte_vals(byt):
        return byt
else:
    def _compat_bytes_to_byte_vals(byt):
        return [struct.unpack(b'!B', b)[0] for b in byt]
# int.from_bytes exists on Python 3; fall back to a manual big-endian fold.
try:
    _compat_int_from_byte_vals = int.from_bytes
except AttributeError:
    def _compat_int_from_byte_vals(bytvals, endianess):
        """Intended as an int.from_bytes replacement; big-endian only."""
        assert endianess == 'big'
        res = 0
        for bv in bytvals:
            assert isinstance(bv, _compat_int_types)
            res = (res << 8) + bv
        return res
+
+
def _compat_to_bytes(intval, length, endianess):
    """Big-endian ``int.to_bytes`` replacement for 4- and 16-byte widths."""
    assert isinstance(intval, _compat_int_types)
    assert endianess == 'big'
    if length == 4:
        if not 0 <= intval < 2 ** 32:
            raise struct.error("integer out of range for 'I' format code")
        return struct.pack(b'!I', intval)
    if length == 16:
        if not 0 <= intval < 2 ** 128:
            raise struct.error("integer out of range for 'QQ' format code")
        high = intval >> 64
        low = intval & 0xffffffffffffffff
        return struct.pack(b'!QQ', high, low)
    raise NotImplementedError()
if hasattr(int, 'bit_length'):
    # Not int.bit_length , since that won't work in 2.7 where long exists
    def _compat_bit_length(i):
        return i.bit_length()
else:
    # Very old Pythons: count how many right shifts until the value is 0.
    def _compat_bit_length(i):
        for res in itertools.count():
            if i >> res == 0:
                return res
+
+
+def _compat_range(start, end, step=1):
+ assert step > 0
+ i = start
+ while i < end:
+ yield i
+ i += step
+
+
class _TotalOrderingMixin(object):
    # Mixin that derives __ne__/__le__/__gt__/__ge__ from the subclass's
    # __eq__ and __lt__, carefully propagating NotImplemented so mixed-type
    # comparisons fall back to the other operand.
    __slots__ = ()

    # Helper that derives the other comparison operations from
    # __lt__ and __eq__
    # We avoid functools.total_ordering because it doesn't handle
    # NotImplemented correctly yet (http://bugs.python.org/issue10042)
    def __eq__(self, other):
        # Subclass responsibility.
        raise NotImplementedError

    def __ne__(self, other):
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not equal

    def __lt__(self, other):
        # Subclass responsibility.
        raise NotImplementedError

    def __le__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented or not less:
            # Not strictly less: <= holds iff the operands are equal.
            return self.__eq__(other)
        return less

    def __gt__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented:
            return NotImplemented
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not (less or equal)

    def __ge__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented:
            return NotImplemented
        return not less
+
+
# Bit widths of the two address families.
IPV4LENGTH = 32
IPV6LENGTH = 128


class AddressValueError(ValueError):
    """A Value Error related to the address."""


class NetmaskValueError(ValueError):
    """A Value Error related to the netmask."""
+
+
def ip_address(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address. Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if the *address* passed isn't either a v4 or a v6
          address

    """
    # Try v4 first so small integers parse as IPv4.
    for address_class in (IPv4Address, IPv6Address):
        try:
            return address_class(address)
        except (AddressValueError, NetmaskValueError):
            pass

    if isinstance(address, bytes):
        raise AddressValueError(
            '%r does not appear to be an IPv4 or IPv6 address. '
            'Did you pass in a bytes (str in Python 2) instead of'
            ' a unicode object?' % address)

    raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
                     address)
+
+
def ip_network(address, strict=True):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP network. Either IPv4 or
          IPv6 networks may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address. Or if the network has host bits set.

    """
    # Try v4 first so small integers parse as IPv4.
    for network_class in (IPv4Network, IPv6Network):
        try:
            return network_class(address, strict)
        except (AddressValueError, NetmaskValueError):
            pass

    if isinstance(address, bytes):
        raise AddressValueError(
            '%r does not appear to be an IPv4 or IPv6 network. '
            'Did you pass in a bytes (str in Python 2) instead of'
            ' a unicode object?' % address)

    raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
                     address)
+
+
def ip_interface(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address. Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.

    Notes:
        The IPv?Interface classes describe an Address on a particular
        Network, so they're basically a combination of both the Address
        and Network classes.

    """
    # Try v4 first so small integers parse as IPv4.
    for interface_class in (IPv4Interface, IPv6Interface):
        try:
            return interface_class(address)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
                     address)
+
+
def v4_int_to_packed(address):
    """Represent an address as 4 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv4 IP address.

    Returns:
        The integer address packed as 4 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv4 IP address.

    """
    try:
        return _compat_to_bytes(address, 4, 'big')
    except (struct.error, OverflowError):
        # Normalize the low-level packing error into a ValueError.
        raise ValueError("Address negative or too large for IPv4")
+
+
def v6_int_to_packed(address):
    """Represent an address as 16 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv6 IP address.

    Returns:
        The integer address packed as 16 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv6 IP address.

    """
    try:
        return _compat_to_bytes(address, 16, 'big')
    except (struct.error, OverflowError):
        # Normalize the low-level packing error into a ValueError.
        raise ValueError("Address negative or too large for IPv6")
+
+
def _split_optional_netmask(address):
    """Split 'addr/mask' into its parts; reject more than one '/'.

    Returns a one- or two-element list of strings.
    """
    parts = _compat_str(address).split('/')
    if len(parts) > 2:
        raise AddressValueError("Only one '/' permitted in %r" % address)
    return parts
+
+
+def _find_address_range(addresses):
+ """Find a sequence of sorted deduplicated IPv#Address.
+
+ Args:
+ addresses: a list of IPv#Address objects.
+
+ Yields:
+ A tuple containing the first and last IP addresses in the sequence.
+
+ """
+ it = iter(addresses)
+ first = last = next(it)
+ for ip in it:
+ if ip._ip != last._ip + 1:
+ yield first, last
+ first = ip
+ last = ip
+ yield first, last
+
+
def _count_righthand_zero_bits(number, bits):
    """Count the number of zero bits on the right hand side.

    Args:
        number: an integer.
        bits: maximum number of bits to count.

    Returns:
        The number of trailing zero bits, capped at *bits*.
    """
    if number == 0:
        return bits
    # ~number & (number - 1) turns exactly the trailing zero bits into ones.
    trailing = _compat_bit_length(~number & (number - 1))
    return min(bits, trailing)
+
+
def summarize_address_range(first, last):
    """Summarize a network range given the first and last IP addresses.

    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ...                              IPv4Address('192.0.2.130')))
        ...                              #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
         IPv4Network('192.0.2.130/32')]

    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.

    Returns:
        An iterator of the summarized IPv(4|6) network objects.

    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.

    """
    if (not (isinstance(first, _BaseAddress) and
             isinstance(last, _BaseAddress))):
        raise TypeError('first and last must be IP addresses, not networks')
    if first.version != last.version:
        raise TypeError("%s and %s are not of the same version" % (
            first, last))
    if first > last:
        raise ValueError('last IP address must be greater than first')

    if first.version == 4:
        ip = IPv4Network
    elif first.version == 6:
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')

    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last._ip
    while first_int <= last_int:
        # Greedily emit the largest network that both starts at first_int
        # (alignment limit) and does not overrun last_int (size limit).
        nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
                    _compat_bit_length(last_int - first_int + 1) - 1)
        net = ip((first_int, ip_bits - nbits))
        yield net
        first_int += 1 << nbits
        # Stop on address-space wraparound (range reached the top).
        if first_int - 1 == ip._ALL_ONES:
            break
+
+
def _collapse_addresses_internal(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')

        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
          [IPv4Network('192.0.2.0/24')]

        This shouldn't be called directly; it is called via
          collapse_addresses([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.

    """
    # First merge
    to_merge = list(addresses)
    subnets = {}
    while to_merge:
        net = to_merge.pop()
        supernet = net.supernet()
        existing = subnets.get(supernet)
        if existing is None:
            # First sibling seen under this supernet; remember it.
            subnets[supernet] = net
        elif existing != net:
            # Merge consecutive subnets: both halves of the supernet are
            # present, so re-queue the supernet itself for merging.
            del subnets[supernet]
            to_merge.append(supernet)
    # Then iterate over resulting networks, skipping subsumed subnets
    last = None
    for net in sorted(subnets.values()):
        if last is not None:
            # Since they are sorted,
            # last.network_address <= net.network_address is a given.
            if last.broadcast_address >= net.broadcast_address:
                continue
        yield net
        last = net
+
+
def collapse_addresses(addresses):
    """Collapse a list of IP objects.

    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
                           [IPv4Network('192.0.2.0/24')]

    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.

    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.

    Raises:
        TypeError: If passed a list of mixed version objects.

    """
    addrs = []
    ips = []
    nets = []

    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            # A /32 (or /128) network is treated as a single address.
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            try:
                ips.append(ip.ip)
            except AttributeError:
                # Plain networks have no .ip (interfaces do).
                ips.append(ip.network_address)
        else:
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, nets[-1]))
            nets.append(ip)

    # sort and dedup
    ips = sorted(set(ips))

    # find consecutive address ranges in the sorted sequence and summarize them
    if ips:
        for first, last in _find_address_range(ips):
            addrs.extend(summarize_address_range(first, last))

    return _collapse_addresses_internal(addrs + nets)
+
+
def get_mixed_type_key(obj):
    """Return a key suitable for sorting between networks and addresses.

    Address and Network objects are not sortable by default; they're
    fundamentally different, so

        IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')

    doesn't make any sense. When you nevertheless need a total order over
    a mixed collection, pass this function as the key= argument to
    sorted().

    Args:
        obj: either a Network or Address object.
    Returns:
        appropriate key.

    """
    if isinstance(obj, _BaseNetwork):
        return obj._get_networks_key()
    if isinstance(obj, _BaseAddress):
        return obj._get_address_key()
    return NotImplemented
+
+
class _IPAddressBase(_TotalOrderingMixin):

    """The mother class.

    Shared base of both addresses and networks; provides string-form
    properties and the netmask/prefix parsing helpers used by subclasses.
    """

    __slots__ = ()

    @property
    def exploded(self):
        """Return the longhand version of the IP address as a string."""
        return self._explode_shorthand_ip_string()

    @property
    def compressed(self):
        """Return the shorthand version of the IP address as a string."""
        return _compat_str(self)

    @property
    def reverse_pointer(self):
        """The name of the reverse DNS pointer for the IP address, e.g.:
            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
            '1.0.0.127.in-addr.arpa'
            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'

        """
        return self._reverse_pointer()

    @property
    def version(self):
        # Concrete subclasses override this with 4 or 6.
        msg = '%200s has no version specified' % (type(self),)
        raise NotImplementedError(msg)

    def _check_int_address(self, address):
        # Validate an integer address against the family's value range.
        if address < 0:
            msg = "%d (< 0) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._version))
        if address > self._ALL_ONES:
            msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._max_prefixlen,
                                           self._version))

    def _check_packed_address(self, address, expected_len):
        # Validate a packed-bytes address against the family's byte length.
        address_len = len(address)
        if address_len != expected_len:
            msg = (
                '%r (len %d != %d) is not permitted as an IPv%d address. '
                'Did you pass in a bytes (str in Python 2) instead of'
                ' a unicode object?'
            )
            raise AddressValueError(msg % (address, address_len,
                                           expected_len, self._version))

    @classmethod
    def _ip_int_from_prefix(cls, prefixlen):
        """Turn the prefix length into a bitwise netmask

        Args:
            prefixlen: An integer, the prefix length.

        Returns:
            An integer.

        """
        return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)

    @classmethod
    def _prefix_from_ip_int(cls, ip_int):
        """Return prefix length from the bitwise netmask.

        Args:
            ip_int: An integer, the netmask in expanded bitwise format

        Returns:
            An integer, the prefix length.

        Raises:
            ValueError: If the input intermingles zeroes & ones
        """
        trailing_zeroes = _count_righthand_zero_bits(ip_int,
                                                     cls._max_prefixlen)
        prefixlen = cls._max_prefixlen - trailing_zeroes
        # A valid netmask is a contiguous run of ones followed by zeroes;
        # verify the leading portion is all ones.
        leading_ones = ip_int >> trailing_zeroes
        all_ones = (1 << prefixlen) - 1
        if leading_ones != all_ones:
            byteslen = cls._max_prefixlen // 8
            details = _compat_to_bytes(ip_int, byteslen, 'big')
            msg = 'Netmask pattern %r mixes zeroes & ones'
            raise ValueError(msg % details)
        return prefixlen

    @classmethod
    def _report_invalid_netmask(cls, netmask_str):
        msg = '%r is not a valid netmask' % netmask_str
        raise NetmaskValueError(msg)

    @classmethod
    def _prefix_from_prefix_string(cls, prefixlen_str):
        """Return prefix length from a numeric string

        Args:
            prefixlen_str: The string to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask
        """
        # int allows a leading +/- as well as surrounding whitespace,
        # so we ensure that isn't the case
        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
            cls._report_invalid_netmask(prefixlen_str)
        try:
            prefixlen = int(prefixlen_str)
        except ValueError:
            cls._report_invalid_netmask(prefixlen_str)
        if not (0 <= prefixlen <= cls._max_prefixlen):
            cls._report_invalid_netmask(prefixlen_str)
        return prefixlen

    @classmethod
    def _prefix_from_ip_string(cls, ip_str):
        """Turn a netmask/hostmask string into a prefix length

        Args:
            ip_str: The netmask/hostmask to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        """
        # Parse the netmask/hostmask like an IP address.
        try:
            ip_int = cls._ip_int_from_string(ip_str)
        except AddressValueError:
            cls._report_invalid_netmask(ip_str)

        # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
        # Note that the two ambiguous cases (all-ones and all-zeroes) are
        # treated as netmasks.
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            pass

        # Invert the bits, and try matching a /0+1+/ hostmask instead.
        ip_int ^= cls._ALL_ONES
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            cls._report_invalid_netmask(ip_str)

    def __reduce__(self):
        # Pickle via the string form, which every subclass can parse.
        return self.__class__, (_compat_str(self),)
+
+
class _BaseAddress(_IPAddressBase):

    """A generic IP object.

    This IP class contains the version independent methods which are
    used by single IP addresses.
    """

    __slots__ = ()

    def __int__(self):
        return self._ip

    def __eq__(self, other):
        try:
            # Equal iff same integer value AND same IP version.
            return (self._ip == other._ip and
                    self._version == other._version)
        except AttributeError:
            return NotImplemented

    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseAddress):
            # Comparing an address against a network is a TypeError,
            # not merely unordered.
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                self, other))
        if self._ip != other._ip:
            return self._ip < other._ip
        return False

    # Shorthand for Integer addition and subtraction. This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if not isinstance(other, _compat_int_types):
            return NotImplemented
        return self.__class__(int(self) + other)

    def __sub__(self, other):
        if not isinstance(other, _compat_int_types):
            return NotImplemented
        return self.__class__(int(self) - other)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        return _compat_str(self._string_from_ip_int(self._ip))

    def __hash__(self):
        return hash(hex(int(self._ip)))

    def _get_address_key(self):
        # Sort key used by get_mixed_type_key(): version first, then value.
        return (self._version, self)

    def __reduce__(self):
        # Pickle via the integer value (cheaper than the string form).
        return self.__class__, (self._ip,)
+
+
+class _BaseNetwork(_IPAddressBase):
+
+ """A generic IP network object.
+
+ This IP class contains the version independent methods which are
+ used by networks.
+
+ """
    def __init__(self, address):
        # The address argument is parsed entirely by subclasses; the base
        # class only initializes the cache used by the lazily computed
        # broadcast_address/hostmask properties.
        self._cache = {}
+
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        # CIDR notation, e.g. '192.0.2.0/24'.
        return '%s/%d' % (self.network_address, self.prefixlen)
+
    def hosts(self):
        """Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the network
        or broadcast addresses.

        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        # network + 1 skips the network address; the exclusive end bound
        # skips the broadcast address.
        for x in _compat_range(network + 1, broadcast):
            yield self._address_class(x)
+
    def __iter__(self):
        # Unlike hosts(), iteration includes the network and broadcast
        # addresses.
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in _compat_range(network, broadcast + 1):
            yield self._address_class(x)
+
    def __getitem__(self, n):
        """Return the n-th address in the network; supports negative n."""
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        if n >= 0:
            if network + n > broadcast:
                raise IndexError
            return self._address_class(network + n)
        else:
            # Negative indices count back from the broadcast address:
            # n == -1 is the broadcast address itself.
            n += 1
            if broadcast + n < network:
                raise IndexError
            return self._address_class(broadcast + n)
+
    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseNetwork):
            # Comparing a network against an address is a TypeError,
            # not merely unordered.
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                self, other))
        # Order by network address first, then by netmask (shorter
        # prefixes sort before longer ones at the same base address).
        if self.network_address != other.network_address:
            return self.network_address < other.network_address
        if self.netmask != other.netmask:
            return self.netmask < other.netmask
        return False
+
    def __eq__(self, other):
        try:
            # Equal iff same version, same base address, and same netmask.
            return (self._version == other._version and
                    self.network_address == other.network_address and
                    int(self.netmask) == int(other.netmask))
        except AttributeError:
            return NotImplemented

    def __hash__(self):
        # Consistent with __eq__: derived from base address and netmask.
        return hash(int(self.network_address) ^ int(self.netmask))
+
    def __contains__(self, other):
        # always false if one is v4 and the other is v6.
        if self._version != other._version:
            return False
        # dealing with another network.
        if isinstance(other, _BaseNetwork):
            # Networks are never "in" a network; use subnet_of() for that.
            return False
        # dealing with another address
        else:
            # address
            return (int(self.network_address) <= int(other._ip) <=
                    int(self.broadcast_address))
+
+ def overlaps(self, other):
+ """Tell if self is partly contained in other."""
+ return self.network_address in other or (
+ self.broadcast_address in other or (
+ other.network_address in self or (
+ other.broadcast_address in self)))
+
    @property
    def broadcast_address(self):
        # Computed lazily and memoized in self._cache.
        x = self._cache.get('broadcast_address')
        if x is None:
            x = self._address_class(int(self.network_address) |
                                    int(self.hostmask))
            self._cache['broadcast_address'] = x
        return x

    @property
    def hostmask(self):
        # The bitwise inverse of the netmask; lazily memoized.
        x = self._cache.get('hostmask')
        if x is None:
            x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
            self._cache['hostmask'] = x
        return x
+
    @property
    def with_prefixlen(self):
        # e.g. '192.0.2.0/24'
        return '%s/%d' % (self.network_address, self._prefixlen)

    @property
    def with_netmask(self):
        # e.g. '192.0.2.0/255.255.255.0'
        return '%s/%s' % (self.network_address, self.netmask)

    @property
    def with_hostmask(self):
        # e.g. '192.0.2.0/0.0.0.255'
        return '%s/%s' % (self.network_address, self.hostmask)

    @property
    def num_addresses(self):
        """Number of hosts in the current subnet."""
        return int(self.broadcast_address) - int(self.network_address) + 1
+
    @property
    def _address_class(self):
        # Returning bare address objects (rather than interfaces) allows for
        # more consistent behaviour across the network address, broadcast
        # address and individual host addresses.
        msg = '%200s has no associated address class' % (type(self),)
        raise NotImplementedError(msg)

    @property
    def prefixlen(self):
        return self._prefixlen
+
    def address_exclude(self, other):
        """Remove an address from a larger block.

        For example:

            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            addr1.address_exclude(addr2) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]

        or IPv6:

            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            addr1.address_exclude(addr2) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]

        Args:
            other: An IPv4Network or IPv6Network object of the same type.

        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.

        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.

        """
        if not self._version == other._version:
            raise TypeError("%s and %s are not of the same version" % (
                self, other))

        if not isinstance(other, _BaseNetwork):
            raise TypeError("%s is not a network object" % other)

        if not other.subnet_of(self):
            raise ValueError('%s not contained in %s' % (other, self))
        if other == self:
            # Removing the whole block leaves nothing.
            return

        # Make sure we're comparing the network of other.
        other = other.__class__('%s/%s' % (other.network_address,
                                           other.prefixlen))

        # Repeatedly halve the block: yield the half that does not contain
        # *other* and recurse into the half that does, until a half equals
        # *other* exactly.
        s1, s2 = self.subnets()
        while s1 != other and s2 != other:
            if other.subnet_of(s1):
                yield s2
                s1, s2 = s1.subnets()
            elif other.subnet_of(s2):
                yield s1
                s1, s2 = s2.subnets()
            else:
                # If we got here, there's a bug somewhere.
                raise AssertionError('Error performing exclusion: '
                                     's1: %s s2: %s other: %s' %
                                     (s1, s2, other))
        if s1 == other:
            yield s2
        elif s2 == other:
            yield s1
        else:
            # If we got here, there's a bug somewhere.
            raise AssertionError('Error performing exclusion: '
                                 's1: %s s2: %s other: %s' %
                                 (s1, s2, other))
+
+ def compare_networks(self, other):
+ """Compare two IP objects.
+
+ This is only concerned about the comparison of the integer
+ representation of the network addresses. This means that the
+ host bits aren't considered at all in this method. If you want
+ to compare host bits, you can easily enough do a
+ 'HostA._ip < HostB._ip'
+
+ Args:
+ other: An IP object.
+
+ Returns:
+ If the IP versions of self and other are the same, returns:
+
+ -1 if self < other:
+ eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
+ IPv6Network('2001:db8::1000/124') <
+ IPv6Network('2001:db8::2000/124')
+ 0 if self == other
+ eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
+ IPv6Network('2001:db8::1000/124') ==
+ IPv6Network('2001:db8::1000/124')
+ 1 if self > other
+ eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
+ IPv6Network('2001:db8::2000/124') >
+ IPv6Network('2001:db8::1000/124')
+
+ Raises:
+ TypeError if the IP versions are different.
+
+ """
+ # does this need to raise a ValueError?
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same type' % (
+ self, other))
+ # self._version == other._version below here:
+ if self.network_address < other.network_address:
+ return -1
+ if self.network_address > other.network_address:
+ return 1
+ # self.network_address == other.network_address below here:
+ if self.netmask < other.netmask:
+ return -1
+ if self.netmask > other.netmask:
+ return 1
+ return 0
+
+ def _get_networks_key(self):
+ """Network-only key function.
+
+ Returns an object that identifies this address' network and
+ netmask. This function is a suitable "key" argument for sorted()
+ and list.sort().
+
+ """
+ return (self._version, self.network_address, self.netmask)
+
+ def subnets(self, prefixlen_diff=1, new_prefix=None):
+ """The subnets which join to make the current subnet.
+
+ In the case that self contains only one IP
+ (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
+ for IPv6), yield an iterator with just ourself.
+
+ Args:
+ prefixlen_diff: An integer, the amount the prefix length
+ should be increased by. This should not be set if
+ new_prefix is also set.
+ new_prefix: The desired new prefix length. This must be a
+ larger number (smaller prefix) than the existing prefix.
+ This should not be set if prefixlen_diff is also set.
+
+ Returns:
+ An iterator of IPv(4|6) objects.
+
+ Raises:
+ ValueError: The prefixlen_diff is too small or too large.
+ OR
+ prefixlen_diff and new_prefix are both set or new_prefix
+ is a smaller number than the current prefix (smaller
+ number means a larger network)
+
+ """
+ if self._prefixlen == self._max_prefixlen:
+ yield self
+ return
+
+ if new_prefix is not None:
+ if new_prefix < self._prefixlen:
+ raise ValueError('new prefix must be longer')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = new_prefix - self._prefixlen
+
+ if prefixlen_diff < 0:
+ raise ValueError('prefix length diff must be > 0')
+ new_prefixlen = self._prefixlen + prefixlen_diff
+
+ if new_prefixlen > self._max_prefixlen:
+ raise ValueError(
+ 'prefix length diff %d is invalid for netblock %s' % (
+ new_prefixlen, self))
+
+ start = int(self.network_address)
+ end = int(self.broadcast_address)
+ step = (int(self.hostmask) + 1) >> prefixlen_diff
+ for new_addr in _compat_range(start, end, step):
+ current = self.__class__((new_addr, new_prefixlen))
+ yield current
+
+ def supernet(self, prefixlen_diff=1, new_prefix=None):
+ """The supernet containing the current network.
+
+ Args:
+ prefixlen_diff: An integer, the amount the prefix length of
+ the network should be decreased by. For example, given a
+ /24 network and a prefixlen_diff of 3, a supernet with a
+ /21 netmask is returned.
+
+ Returns:
+ An IPv4 network object.
+
+ Raises:
+ ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
+ a negative prefix length.
+ OR
+ If prefixlen_diff and new_prefix are both set or new_prefix is a
+ larger number than the current prefix (larger number means a
+ smaller network)
+
+ """
+ if self._prefixlen == 0:
+ return self
+
+ if new_prefix is not None:
+ if new_prefix > self._prefixlen:
+ raise ValueError('new prefix must be shorter')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = self._prefixlen - new_prefix
+
+ new_prefixlen = self.prefixlen - prefixlen_diff
+ if new_prefixlen < 0:
+ raise ValueError(
+ 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
+ (self.prefixlen, prefixlen_diff))
+ return self.__class__((
+ int(self.network_address) & (int(self.netmask) << prefixlen_diff),
+ new_prefixlen
+ ))
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is a multicast address.
+ See RFC 2373 2.7 for details.
+
+ """
+ return (self.network_address.is_multicast and
+ self.broadcast_address.is_multicast)
+
+ def subnet_of(self, other):
+ # always false if one is v4 and the other is v6.
+ if self._version != other._version:
+ return False
+ # dealing with another network.
+ if (hasattr(other, 'network_address') and
+ hasattr(other, 'broadcast_address')):
+ return (other.network_address <= self.network_address and
+ other.broadcast_address >= self.broadcast_address)
+ # dealing with another address
+ else:
+ raise TypeError('Unable to test subnet containment with element '
+ 'of type %s' % type(other))
+
+ def supernet_of(self, other):
+ # always false if one is v4 and the other is v6.
+ if self._version != other._version:
+ return False
+ # dealing with another network.
+ if (hasattr(other, 'network_address') and
+ hasattr(other, 'broadcast_address')):
+ return (other.network_address >= self.network_address and
+ other.broadcast_address <= self.broadcast_address)
+ # dealing with another address
+ else:
+ raise TypeError('Unable to test subnet containment with element '
+ 'of type %s' % type(other))
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within one of the
+ reserved IPv6 Network ranges.
+
+ """
+ return (self.network_address.is_reserved and
+ self.broadcast_address.is_reserved)
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 4291.
+
+ """
+ return (self.network_address.is_link_local and
+ self.broadcast_address.is_link_local)
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv4-special-registry or iana-ipv6-special-registry.
+
+ """
+ return (self.network_address.is_private and
+ self.broadcast_address.is_private)
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, True if the address is not reserved per
+ iana-ipv4-special-registry or iana-ipv6-special-registry.
+
+ """
+ return not self.is_private
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 2373 2.5.2.
+
+ """
+ return (self.network_address.is_unspecified and
+ self.broadcast_address.is_unspecified)
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback address as defined in
+ RFC 2373 2.5.3.
+
+ """
+ return (self.network_address.is_loopback and
+ self.broadcast_address.is_loopback)
+
+
class _BaseV4(object):

    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.

    """

    __slots__ = ()
    _version = 4
    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2 ** IPV4LENGTH) - 1
    # The only characters allowed inside an octet.
    _DECIMAL_DIGITS = frozenset('0123456789')

    # the valid octets for host and netmasks. only useful for IPv4.
    _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])

    _max_prefixlen = IPV4LENGTH
    # There are only a handful of valid v4 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    def _explode_shorthand_ip_string(self):
        # IPv4 has no shorthand notation; the string form is already
        # fully expanded.
        return _compat_str(self)

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                try:
                    # Check for a netmask in prefix length form
                    prefixlen = cls._prefix_from_prefix_string(arg)
                except NetmaskValueError:
                    # Check for a netmask or hostmask in dotted-quad form.
                    # This may raise NetmaskValueError.
                    prefixlen = cls._prefix_from_ip_string(arg)
            netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.

        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError("Expected 4 octets in %r" % ip_str)

        try:
            # Each octet becomes one big-endian byte of the 32-bit value.
            return _compat_int_from_byte_vals(
                map(cls._parse_octet, octets), 'big')
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_octet(cls, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].

        """
        if not octet_str:
            raise ValueError("Empty octet not permitted")
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._DECIMAL_DIGITS.issuperset(octet_str):
            msg = "Only decimal digits permitted in %r"
            raise ValueError(msg % octet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(octet_str) > 3:
            msg = "At most 3 characters permitted in %r"
            raise ValueError(msg % octet_str)
        # Convert to integer (we know digits are legal)
        octet_int = int(octet_str, 10)
        # Any octets that look like they *might* be written in octal,
        # and which don't look exactly the same in both octal and
        # decimal are rejected as ambiguous
        if octet_int > 7 and octet_str[0] == '0':
            msg = "Ambiguous (octal/decimal) value in %r not permitted"
            raise ValueError(msg % octet_str)
        if octet_int > 255:
            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
        return octet_int

    @classmethod
    def _string_from_ip_int(cls, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.

        """
        # Iterating the packed form yields ints on Python 3 but 1-byte
        # strings on Python 2; only the latter need struct.unpack.
        return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
                                    if isinstance(b, bytes)
                                    else b)
                        for b in _compat_to_bytes(ip_int, 4, 'big'))

    def _is_hostmask(self, ip_str):
        """Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.

        """
        bits = ip_str.split('.')
        try:
            parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
        except ValueError:
            return False
        if len(parts) != len(bits):
            return False
        # Valid mask octets in ascending order (e.g. 0.0.0.255) indicate
        # a hostmask; descending order (e.g. 255.0.0.0) is a netmask.
        if parts[0] < parts[-1]:
            return True
        return False

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.

        """
        reverse_octets = _compat_str(self).split('.')[::-1]
        return '.'.join(reverse_octets) + '.in-addr.arpa'

    @property
    def max_prefixlen(self):
        """The maximum prefix length, in bits, for this address version."""
        return self._max_prefixlen

    @property
    def version(self):
        """The IP version number (4)."""
        return self._version
+
+
class IPv4Address(_BaseV4, _BaseAddress):

    """Represent and manipulate single IPv4 Addresses."""

    __slots__ = ('_ip', '__weakref__')

    def __init__(self, address):

        """Create an IPv4Address from a string, an integer or 4 packed
        bytes.

        Args:
            address: A string or integer representing the IP.  An
                integer is taken as the raw 32-bit value, so
                IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.

        """
        # Fast path: a plain integer is stored directly after a range
        # check.
        if isinstance(address, _compat_int_types):
            self._check_int_address(address)
            self._ip = address
            return

        # A 4-byte packed address.
        if isinstance(address, bytes):
            self._check_packed_address(address, 4)
            byte_vals = _compat_bytes_to_byte_vals(address)
            self._ip = _compat_int_from_byte_vals(byte_vals, 'big')
            return

        # Anything else is treated as (something convertible to) a
        # dotted-quad string.
        addr_str = _compat_str(address)
        if '/' in addr_str:
            raise AddressValueError("Unexpected '/' in %r" % address)
        self._ip = self._ip_int_from_string(addr_str)

    @property
    def packed(self):
        """The binary representation of this address."""
        return v4_int_to_packed(self._ip)

    @property
    def is_reserved(self):
        """True if the address lies inside the reserved IPv4 block
        (see _IPv4Constants._reserved_network)."""
        return self in self._constants._reserved_network

    @property
    def is_private(self):
        """True if the address belongs to one of the private-use
        networks of the iana-ipv4-special-registry."""
        for net in self._constants._private_networks:
            if self in net:
                return True
        return False

    @property
    def is_multicast(self):
        """True if the address is in the multicast range.

        See RFC 3171 for details.

        """
        return self in self._constants._multicast_network

    @property
    def is_unspecified(self):
        """True if this is the unspecified address as defined in
        RFC 5735 3."""
        return self == self._constants._unspecified_address

    @property
    def is_loopback(self):
        """True if the address is a loopback address per RFC 3330."""
        return self in self._constants._loopback_network

    @property
    def is_link_local(self):
        """True if the address is link-local per RFC 3927."""
        return self in self._constants._linklocal_network
+
+
class IPv4Interface(IPv4Address):

    """An IPv4 host address together with its associated network.

    Besides behaving as an IPv4Address, instances carry ``network``,
    ``netmask``, ``hostmask`` and a prefix length describing the
    surrounding network.
    """

    def __init__(self, address):
        # Integer / packed-bytes input: a bare host address with an
        # implicit /32 network.
        if isinstance(address, (bytes, _compat_int_types)):
            IPv4Address.__init__(self, address)
            self.network = IPv4Network(self._ip)
            self._prefixlen = self._max_prefixlen
            # Bug fix: netmask/hostmask were previously never assigned
            # on this path, so with_netmask/with_hostmask raised
            # AttributeError for interfaces built from ints or bytes.
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # (address, prefix) tuple input.
        if isinstance(address, tuple):
            IPv4Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen

            self.network = IPv4Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String (or string-convertible) input, optionally "addr/mask".
        addr = _split_optional_netmask(address)
        IPv4Address.__init__(self, addr[0])

        self.network = IPv4Network(address, strict=False)
        self._prefixlen = self.network._prefixlen

        self.netmask = self.network.netmask
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        """Equal only when both the address and the network match."""
        address_equal = IPv4Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv4Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        # Mix the network in so an interface and its bare address hash
        # differently (mirrors __eq__).
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        """The host address as a plain IPv4Address (network stripped)."""
        return IPv4Address(self._ip)

    @property
    def with_prefixlen(self):
        """The interface as an 'address/prefixlen' string."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        """The interface as an 'address/netmask' string."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        """The interface as an 'address/hostmask' string."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)
+
+
class IPv4Network(_BaseV4, _BaseNetwork):

    """This class represents and manipulates 32-bit IPv4 networks.

    Attributes: [examples for IPv4Network('192.0.2.0/27')]
        .network_address: IPv4Address('192.0.2.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.31')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27

    """
    # Class to use when creating address objects
    _address_class = IPv4Address

    def __init__(self, address, strict=True):

        """Instantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.0.2.0/24'
              '192.0.2.0/255.255.255.0'
              '192.0.0.2/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.0.2.1'
              '192.0.2.1/255.255.255.255'
              '192.0.2.1/32'
              are also functionally equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.

              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.

              Additionally, an integer can be passed, so
              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
              or, more generally
              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
              IPv4Interface('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict is True and a network address is not
              supplied.

        """
        _BaseNetwork.__init__(self, address)

        # Constructing from a packed address or integer: a single host,
        # so the maximum prefix length is used.
        if isinstance(address, (_compat_int_types, bytes)):
            self.network_address = IPv4Address(address)
            self.netmask, self._prefixlen = self._make_netmask(
                self._max_prefixlen)
            # fixme: address/network test here.
            return

        # Constructing from an (address, mask) tuple.
        if isinstance(address, tuple):
            if len(address) > 1:
                arg = address[1]
            else:
                # We weren't given an address[1]
                arg = self._max_prefixlen
            self.network_address = IPv4Address(address[0])
            self.netmask, self._prefixlen = self._make_netmask(arg)
            packed = int(self.network_address)
            if packed & int(self.netmask) != packed:
                if strict:
                    raise ValueError('%s has host bits set' % self)
                else:
                    # Non-strict mode silently clears the host bits.
                    self.network_address = IPv4Address(packed &
                                                       int(self.netmask))
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)
        self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))

        if len(addr) == 2:
            arg = addr[1]
        else:
            arg = self._max_prefixlen
        self.netmask, self._prefixlen = self._make_netmask(arg)

        if strict:
            if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        # Normalize: clear any host bits (a no-op when strict passed).
        self.network_address = IPv4Address(int(self.network_address) &
                                           int(self.netmask))

        # A network one bit short of the maximum prefix holds exactly two
        # addresses; rebinding hosts to __iter__ exposes both of them
        # (presumably to support RFC 3021 point-to-point /31 links --
        # confirm against callers).
        if self._prefixlen == (self._max_prefixlen - 1):
            self.hosts = self.__iter__

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry.

        """
        # 100.64.0.0/10 (shared address space) is treated as neither
        # private nor global.
        return (not (self.network_address in IPv4Network('100.64.0.0/10') and
                     self.broadcast_address in IPv4Network('100.64.0.0/10')) and
                not self.is_private)
+
+
class _IPv4Constants(object):

    # Well-known IPv4 networks referenced by IPv4Address's
    # classification properties (is_private, is_multicast, ...); the
    # class is attached as IPv4Address._constants below.

    _linklocal_network = IPv4Network('169.254.0.0/16')

    _loopback_network = IPv4Network('127.0.0.0/8')

    _multicast_network = IPv4Network('224.0.0.0/4')

    # Ranges considered private; presumably mirrors the
    # iana-ipv4-special-registry -- verify against the registry when
    # updating this list.
    _private_networks = [
        IPv4Network('0.0.0.0/8'),
        IPv4Network('10.0.0.0/8'),
        IPv4Network('127.0.0.0/8'),
        IPv4Network('169.254.0.0/16'),
        IPv4Network('172.16.0.0/12'),
        IPv4Network('192.0.0.0/29'),
        IPv4Network('192.0.0.170/31'),
        IPv4Network('192.0.2.0/24'),
        IPv4Network('192.168.0.0/16'),
        IPv4Network('198.18.0.0/15'),
        IPv4Network('198.51.100.0/24'),
        IPv4Network('203.0.113.0/24'),
        IPv4Network('240.0.0.0/4'),
        IPv4Network('255.255.255.255/32'),
    ]

    _reserved_network = IPv4Network('240.0.0.0/4')

    _unspecified_address = IPv4Address('0.0.0.0')
+
+
# Attach the well-known constant networks to IPv4Address so its
# classification properties (is_private, is_multicast, ...) can read
# them via self._constants.
IPv4Address._constants = _IPv4Constants
+
+
class _BaseV6(object):

    """Base IPv6 object.

    The following methods are used by IPv6 objects in both single IP
    addresses and networks.

    """

    __slots__ = ()
    _version = 6
    _ALL_ONES = (2 ** IPV6LENGTH) - 1
    # Number of 16-bit groups in a full IPv6 address.
    _HEXTET_COUNT = 8
    _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
    _max_prefixlen = IPV6LENGTH

    # There are only a bunch of valid v6 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                # Unlike IPv4, only the prefix-length string form is
                # accepted here.
                prefixlen = cls._prefix_from_prefix_string(arg)
            netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn an IPv6 ip_str into an integer.

        Args:
            ip_str: A string, the IPv6 ip_str.

        Returns:
            An int, the IPv6 address

        Raises:
            AddressValueError: if ip_str isn't a valid IPv6 Address.

        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        parts = ip_str.split(':')

        # An IPv6 address needs at least 2 colons (3 parts).
        _min_parts = 3
        if len(parts) < _min_parts:
            msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
            raise AddressValueError(msg)

        # If the address has an IPv4-style suffix, convert it to hexadecimal.
        if '.' in parts[-1]:
            try:
                ipv4_int = IPv4Address(parts.pop())._ip
            except AddressValueError as exc:
                raise AddressValueError("%s in %r" % (exc, ip_str))
            parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
            parts.append('%x' % (ipv4_int & 0xFFFF))

        # An IPv6 address can't have more than 8 colons (9 parts).
        # The extra colon comes from using the "::" notation for a single
        # leading or trailing zero part.
        _max_parts = cls._HEXTET_COUNT + 1
        if len(parts) > _max_parts:
            msg = "At most %d colons permitted in %r" % (
                _max_parts - 1, ip_str)
            raise AddressValueError(msg)

        # Disregarding the endpoints, find '::' with nothing in between.
        # This indicates that a run of zeroes has been skipped.
        skip_index = None
        for i in _compat_range(1, len(parts) - 1):
            if not parts[i]:
                if skip_index is not None:
                    # Can't have more than one '::'
                    msg = "At most one '::' permitted in %r" % ip_str
                    raise AddressValueError(msg)
                skip_index = i

        # parts_hi is the number of parts to copy from above/before the '::'
        # parts_lo is the number of parts to copy from below/after the '::'
        if skip_index is not None:
            # If we found a '::', then check if it also covers the endpoints.
            parts_hi = skip_index
            parts_lo = len(parts) - skip_index - 1
            if not parts[0]:
                parts_hi -= 1
                if parts_hi:
                    msg = "Leading ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                parts_lo -= 1
                if parts_lo:
                    msg = "Trailing ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
            if parts_skipped < 1:
                msg = "Expected at most %d other parts with '::' in %r"
                raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
        else:
            # Otherwise, allocate the entire address to parts_hi. The
            # endpoints could still be empty, but _parse_hextet() will check
            # for that.
            if len(parts) != cls._HEXTET_COUNT:
                msg = "Exactly %d parts expected without '::' in %r"
                raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
            if not parts[0]:
                msg = "Leading ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                msg = "Trailing ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_hi = len(parts)
            parts_lo = 0
            parts_skipped = 0

        try:
            # Now, parse the hextets into a 128-bit integer.
            ip_int = 0
            for i in range(parts_hi):
                ip_int <<= 16
                ip_int |= cls._parse_hextet(parts[i])
            ip_int <<= 16 * parts_skipped
            for i in range(-parts_lo, 0):
                ip_int <<= 16
                ip_int |= cls._parse_hextet(parts[i])
            return ip_int
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_hextet(cls, hextet_str):
        """Convert an IPv6 hextet string into an integer.

        Args:
            hextet_str: A string, the number to parse.

        Returns:
            The hextet as an integer.

        Raises:
            ValueError: if the input isn't strictly a hex number from
              [0..FFFF].

        """
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._HEX_DIGITS.issuperset(hextet_str):
            raise ValueError("Only hex digits permitted in %r" % hextet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(hextet_str) > 4:
            msg = "At most 4 characters permitted in %r"
            raise ValueError(msg % hextet_str)
        # Length check means we can skip checking the integer value
        return int(hextet_str, 16)

    @classmethod
    def _compress_hextets(cls, hextets):
        """Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.

        """
        best_doublecolon_start = -1
        best_doublecolon_len = 0
        doublecolon_start = -1
        doublecolon_len = 0
        for index, hextet in enumerate(hextets):
            if hextet == '0':
                doublecolon_len += 1
                if doublecolon_start == -1:
                    # Start of a sequence of zeros.
                    doublecolon_start = index
                if doublecolon_len > best_doublecolon_len:
                    # This is the longest sequence of zeros so far.
                    best_doublecolon_len = doublecolon_len
                    best_doublecolon_start = doublecolon_start
            else:
                doublecolon_len = 0
                doublecolon_start = -1

        # Only runs of two or more zeros are worth compressing with '::'.
        if best_doublecolon_len > 1:
            best_doublecolon_end = (best_doublecolon_start +
                                    best_doublecolon_len)
            # For zeros at the end of the address.
            if best_doublecolon_end == len(hextets):
                hextets += ['']
            hextets[best_doublecolon_start:best_doublecolon_end] = ['']
            # For zeros at the beginning of the address.
            if best_doublecolon_start == 0:
                hextets = [''] + hextets

        return hextets

    @classmethod
    def _string_from_ip_int(cls, ip_int=None):
        """Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.

        """
        if ip_int is None:
            # NOTE(review): this fallback reads cls._ip, but _ip is an
            # instance attribute, so it looks unreachable when invoked
            # as a classmethod -- confirm callers always pass ip_int.
            ip_int = int(cls._ip)

        if ip_int > cls._ALL_ONES:
            raise ValueError('IPv6 address is too large')

        hex_str = '%032x' % ip_int
        hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]

        hextets = cls._compress_hextets(hextets)
        return ':'.join(hextets)

    def _explode_shorthand_ip_string(self):
        """Expand a shortened IPv6 address.

        Args:
            ip_str: A string, the IPv6 address.

        Returns:
            A string, the expanded IPv6 address.

        """
        # Pick the address portion appropriate for the concrete type.
        if isinstance(self, IPv6Network):
            ip_str = _compat_str(self.network_address)
        elif isinstance(self, IPv6Interface):
            ip_str = _compat_str(self.ip)
        else:
            ip_str = _compat_str(self)

        ip_int = self._ip_int_from_string(ip_str)
        hex_str = '%032x' % ip_int
        parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
        if isinstance(self, (_BaseNetwork, IPv6Interface)):
            return '%s/%d' % (':'.join(parts), self._prefixlen)
        return ':'.join(parts)

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv6 address.

        This implements the method described in RFC3596 2.5.

        """
        reverse_chars = self.exploded[::-1].replace(':', '')
        return '.'.join(reverse_chars) + '.ip6.arpa'

    @property
    def max_prefixlen(self):
        """The maximum prefix length, in bits, for this address version."""
        return self._max_prefixlen

    @property
    def version(self):
        """The IP version number (6)."""
        return self._version
+
+
+class IPv6Address(_BaseV6, _BaseAddress):
+
+ """Represent and manipulate single IPv6 Addresses."""
+
+ __slots__ = ('_ip', '__weakref__')
+
+ def __init__(self, address):
+ """Instantiate a new IPv6 address object.
+
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv6Address('2001:db8::') ==
+ IPv6Address(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Address(int(IPv6Address('2001:db8::'))) ==
+ IPv6Address('2001:db8::')
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+
+ """
+ # Efficient constructor from integer.
+ if isinstance(address, _compat_int_types):
+ self._check_int_address(address)
+ self._ip = address
+ return
+
+ # Constructing from a packed address
+ if isinstance(address, bytes):
+ self._check_packed_address(address, 16)
+ bvs = _compat_bytes_to_byte_vals(address)
+ self._ip = _compat_int_from_byte_vals(bvs, 'big')
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP string.
+ addr_str = _compat_str(address)
+ if '/' in addr_str:
+ raise AddressValueError("Unexpected '/' in %r" % address)
+ self._ip = self._ip_int_from_string(addr_str)
+
    @property
    def packed(self):
        """The binary representation of this address
        (via v6_int_to_packed)."""
        return v6_int_to_packed(self._ip)
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is a multicast address.
+ See RFC 2373 2.7 for details.
+
+ """
+ return self in self._constants._multicast_network
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within one of the
+ reserved IPv6 Network ranges.
+
+ """
+ return any(self in x for x in self._constants._reserved_networks)
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 4291.
+
+ """
+ return self in self._constants._linklocal_network
+
+ @property
+ def is_site_local(self):
+ """Test if the address is reserved for site-local.
+
+ Note that the site-local address space has been deprecated by RFC 3879.
+ Use is_private to test if this address is in the space of unique local
+ addresses as defined by RFC 4193.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+ """
+ return self in self._constants._sitelocal_network
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv6-special-registry.
+
+ """
+ return any(self in net for net in self._constants._private_networks)
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, true if the address is not reserved per
+ iana-ipv6-special-registry.
+
+ """
+ return not self.is_private
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 2373 2.5.2.
+
+ """
+ return self._ip == 0
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback address as defined in
+ RFC 2373 2.5.3.
+
+ """
+ return self._ip == 1
+
+ @property
+ def ipv4_mapped(self):
+ """Return the IPv4 mapped address.
+
+ Returns:
+ If the IPv6 address is a v4 mapped address, return the
+ IPv4 mapped address. Return None otherwise.
+
+ """
+ if (self._ip >> 32) != 0xFFFF:
+ return None
+ return IPv4Address(self._ip & 0xFFFFFFFF)
+
+ @property
+ def teredo(self):
+ """Tuple of embedded teredo IPs.
+
+ Returns:
+ Tuple of the (server, client) IPs or None if the address
+ doesn't appear to be a teredo address (doesn't start with
+ 2001::/32)
+
+ """
+ if (self._ip >> 96) != 0x20010000:
+ return None
+ return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
+ IPv4Address(~self._ip & 0xFFFFFFFF))
+
+ @property
+ def sixtofour(self):
+ """Return the IPv4 6to4 embedded address.
+
+ Returns:
+ The IPv4 6to4-embedded address if present or None if the
+ address doesn't appear to contain a 6to4 embedded address.
+
+ """
+ if (self._ip >> 112) != 0x2002:
+ return None
+ return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
+
+
class IPv6Interface(IPv6Address):

    """An IPv6 address together with its associated network.

    Behaves as an IPv6Address but additionally carries the IPv6Network
    derived from the address/prefix it was constructed with.
    """

    def __init__(self, address):
        # Integer or packed input: a bare address with an implicit /128.
        if isinstance(address, (bytes, _compat_int_types)):
            IPv6Address.__init__(self, address)
            self.network = IPv6Network(self._ip)
            self._prefixlen = self._max_prefixlen
            return
        # Tuple input: (address,) or (address, prefixlen).
        if isinstance(address, tuple):
            IPv6Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen
            self.network = IPv6Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String input: 'addr/prefix' or bare 'addr'.
        addr = _split_optional_netmask(address)
        IPv6Address.__init__(self, addr[0])
        self.network = IPv6Network(address, strict=False)
        self.netmask = self.network.netmask
        self._prefixlen = self.network._prefixlen
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        address_equal = IPv6Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv6Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        # Mix the network in so an interface hashes differently from the
        # bare address (mirrors __eq__).
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    # Pickle support via the shared base-class implementation.
    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        """The address portion, as a bare IPv6Address."""
        return IPv6Address(self._ip)

    @property
    def with_prefixlen(self):
        """String in 'address/prefixlen' form."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        """String in 'address/netmask' form."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        """String in 'address/hostmask' form."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)

    @property
    def is_unspecified(self):
        return self._ip == 0 and self.network.is_unspecified

    @property
    def is_loopback(self):
        return self._ip == 1 and self.network.is_loopback
+
+
class IPv6Network(_BaseV6, _BaseNetwork):

    """This class represents and manipulates 128-bit IPv6 networks.

    Attributes: [examples for IPv6('2001:db8::1000/124')]
        .network_address: IPv6Address('2001:db8::1000')
        .hostmask: IPv6Address('::f')
        .broadcast_address: IPv6Address('2001:db8::100f')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
        .prefixlen: 124

    """

    # Class to use when creating address objects
    _address_class = IPv6Address

    def __init__(self, address, strict=True):
        """Instantiate a new IPv6 Network object.

        Args:
            address: A string or integer representing the IPv6 network or the
                IP and prefix/netmask.
                '2001:db8::/128'
                '2001:db8:0000:0000:0000:0000:0000:0000/128'
                '2001:db8::'
                are all functionally the same in IPv6. That is to say,
                failing to provide a subnetmask will create an object with
                a mask of /128.

                Additionally, an integer can be passed, so
                IPv6Network('2001:db8::') ==
                IPv6Network(42540766411282592856903984951653826560)
                or, more generally
                IPv6Network(int(IPv6Network('2001:db8::'))) ==
                IPv6Network('2001:db8::')

            strict: A boolean. If true, ensure that we have been passed
                A true network address, eg, 2001:db8::1000/124 and not an
                IP address on a network, eg, 2001:db8::1/124.

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
                an IPv6 address.
            ValueError: If strict was True and a network address was not
                supplied.

        """
        _BaseNetwork.__init__(self, address)

        # Efficient constructor from integer or packed address
        if isinstance(address, (bytes, _compat_int_types)):
            self.network_address = IPv6Address(address)
            self.netmask, self._prefixlen = self._make_netmask(
                self._max_prefixlen)
            return

        # Tuple input: (address,) or (address, prefixlen).
        if isinstance(address, tuple):
            if len(address) > 1:
                arg = address[1]
            else:
                arg = self._max_prefixlen
            self.netmask, self._prefixlen = self._make_netmask(arg)
            self.network_address = IPv6Address(address[0])
            packed = int(self.network_address)
            if packed & int(self.netmask) != packed:
                if strict:
                    raise ValueError('%s has host bits set' % self)
                else:
                    # Mask off the host bits to get the true network address.
                    self.network_address = IPv6Address(packed &
                                                       int(self.netmask))
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)

        self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))

        if len(addr) == 2:
            arg = addr[1]
        else:
            arg = self._max_prefixlen
        self.netmask, self._prefixlen = self._make_netmask(arg)

        if strict:
            if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        self.network_address = IPv6Address(int(self.network_address) &
                                           int(self.netmask))

        # A /127 point-to-point link has no Subnet-Router anycast address,
        # so hosts() should yield both addresses; shadow the method with
        # plain iteration on this instance.
        if self._prefixlen == (self._max_prefixlen - 1):
            self.hosts = self.__iter__

    def hosts(self):
        """Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the
        Subnet-Router anycast address.

        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in _compat_range(network + 1, broadcast + 1):
            yield self._address_class(x)

    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        """
        return (self.network_address.is_site_local and
                self.broadcast_address.is_site_local)
+
+
class _IPv6Constants(object):

    """Well-known IPv6 networks backing the IPv6Address classification
    properties (is_link_local, is_multicast, is_private, ...)."""

    # RFC 4291: link-local unicast.
    _linklocal_network = IPv6Network('fe80::/10')

    # RFC 4291 2.7: multicast.
    _multicast_network = IPv6Network('ff00::/8')

    # Ranges reported by is_private (iana-ipv6-special-registry).
    _private_networks = [
        IPv6Network('::1/128'),
        IPv6Network('::/128'),
        IPv6Network('::ffff:0:0/96'),
        IPv6Network('100::/64'),
        IPv6Network('2001::/23'),
        IPv6Network('2001:2::/48'),
        IPv6Network('2001:db8::/32'),
        IPv6Network('2001:10::/28'),
        IPv6Network('fc00::/7'),
        IPv6Network('fe80::/10'),
    ]

    # IETF-reserved blocks reported by is_reserved.
    _reserved_networks = [
        IPv6Network('::/8'), IPv6Network('100::/8'),
        IPv6Network('200::/7'), IPv6Network('400::/6'),
        IPv6Network('800::/5'), IPv6Network('1000::/4'),
        IPv6Network('4000::/3'), IPv6Network('6000::/3'),
        IPv6Network('8000::/3'), IPv6Network('A000::/3'),
        IPv6Network('C000::/3'), IPv6Network('E000::/4'),
        IPv6Network('F000::/5'), IPv6Network('F800::/6'),
        IPv6Network('FE00::/9'),
    ]

    # RFC 3513 2.5.6 site-local (deprecated by RFC 3879).
    _sitelocal_network = IPv6Network('fec0::/10')
+
+
+IPv6Address._constants = _IPv6Constants
diff --git a/lib/maxminddb/__init__.py b/lib/maxminddb/__init__.py
new file mode 100644
index 00000000..7c6008b3
--- /dev/null
+++ b/lib/maxminddb/__init__.py
@@ -0,0 +1,46 @@
+# pylint:disable=C0111
+import os
+
+import maxminddb.reader
+
+try:
+ import maxminddb.extension
+except ImportError:
+ maxminddb.extension = None
+
+from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
+ MODE_MEMORY)
+from maxminddb.decoder import InvalidDatabaseError
+
+
def open_database(database, mode=MODE_AUTO):
    """Open a Maxmind DB database

    Arguments:
    database -- A path to a valid MaxMind DB file such as a GeoIP2
                database file.
    mode -- mode to open the database with. Valid mode are:
            * MODE_MMAP_EXT - use the C extension with memory map.
            * MODE_MMAP - read from memory map. Pure Python.
            * MODE_FILE - read database as standard file. Pure Python.
            * MODE_MEMORY - load database into memory. Pure Python.
            * MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that
                          order. Default mode.

    Raises ValueError for an unsupported mode, or when MODE_MMAP_EXT is
    requested but the C extension is not available (the original code
    raised an opaque AttributeError on None in that case).
    """
    has_extension = bool(maxminddb.extension and
                         hasattr(maxminddb.extension, 'Reader'))
    if (mode == MODE_AUTO and has_extension) or mode == MODE_MMAP_EXT:
        if not has_extension:
            raise ValueError(
                'MODE_MMAP_EXT requires the maxminddb.extension module '
                'to be available')
        return maxminddb.extension.Reader(database)
    elif mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY):
        return maxminddb.reader.Reader(database, mode)
    raise ValueError('Unsupported open mode: {0}'.format(mode))
+
+
def Reader(database):  # pylint: disable=invalid-name
    """Open a MaxMind DB with the default (MODE_AUTO) mode.

    This exists for backwards compatibility. Use open_database instead.
    """
    return open_database(database)
+
+__title__ = 'maxminddb'
+__version__ = '1.2.1'
+__author__ = 'Gregory Oschwald'
+__license__ = 'Apache License, Version 2.0'
+__copyright__ = 'Copyright 2014 Maxmind, Inc.'
diff --git a/lib/maxminddb/compat.py b/lib/maxminddb/compat.py
new file mode 100644
index 00000000..8e2a81c5
--- /dev/null
+++ b/lib/maxminddb/compat.py
@@ -0,0 +1,33 @@
+import sys
+
+import ipaddress
+
+# pylint: skip-file
+
if sys.version_info[0] == 2:
    def compat_ip_address(address):
        # ipaddress on Python 2 only accepts unicode text, so decode
        # byte strings before handing them over.
        if isinstance(address, bytes):
            address = address.decode()
        return ipaddress.ip_address(address)

    # Indexing a Python 2 str yields a 1-char string; ord() gives its value.
    int_from_byte = ord

    # Python 2 has no FileNotFoundError; IOError is the closest equivalent.
    FileNotFoundError = IOError

    def int_from_bytes(b):
        # Big-endian bytes -> int via the hex codec; empty input -> 0.
        if b:
            return int(b.encode("hex"), 16)
        return 0

    byte_from_int = chr
else:
    def compat_ip_address(address):
        return ipaddress.ip_address(address)

    # Indexing bytes already yields an int on Python 3.
    int_from_byte = lambda x: x

    FileNotFoundError = FileNotFoundError

    # Big-endian bytes -> int.
    int_from_bytes = lambda x: int.from_bytes(x, 'big')

    byte_from_int = lambda x: bytes([x])
diff --git a/lib/maxminddb/const.py b/lib/maxminddb/const.py
new file mode 100644
index 00000000..59ea84b6
--- /dev/null
+++ b/lib/maxminddb/const.py
@@ -0,0 +1,7 @@
+"""Constants used in the API"""
+
+MODE_AUTO = 0
+MODE_MMAP_EXT = 1
+MODE_MMAP = 2
+MODE_FILE = 4
+MODE_MEMORY = 8
diff --git a/lib/maxminddb/decoder.py b/lib/maxminddb/decoder.py
new file mode 100644
index 00000000..e8f223a8
--- /dev/null
+++ b/lib/maxminddb/decoder.py
@@ -0,0 +1,173 @@
+"""
+maxminddb.decoder
+~~~~~~~~~~~~~~~~~
+
+This package contains code for decoding the MaxMind DB data section.
+
+"""
+from __future__ import unicode_literals
+
+import struct
+
+from maxminddb.compat import byte_from_int, int_from_bytes
+from maxminddb.errors import InvalidDatabaseError
+
+
class Decoder(object):  # pylint: disable=too-few-public-methods

    """Decoder for the data section of the MaxMind DB"""

    def __init__(self, database_buffer, pointer_base=0, pointer_test=False):
        """Created a Decoder for a MaxMind DB

        Arguments:
        database_buffer -- an mmap'd MaxMind DB file.
        pointer_base -- the base number to use when decoding a pointer
        pointer_test -- used for internal unit testing of pointer code
        """
        self._pointer_test = pointer_test
        self._buffer = database_buffer
        self._pointer_base = pointer_base

    def _decode_array(self, size, offset):
        # An array is `size` consecutive encoded values.
        array = []
        for _ in range(size):
            (value, offset) = self.decode(offset)
            array.append(value)
        return array, offset

    def _decode_boolean(self, size, offset):
        # Booleans carry their value in the size field; no payload bytes.
        return size != 0, offset

    def _decode_bytes(self, size, offset):
        new_offset = offset + size
        return self._buffer[offset:new_offset], new_offset

    # pylint: disable=no-self-argument
    # |-> I am open to better ways of doing this as long as it doesn't involve
    # lots of code duplication.
    def _decode_packed_type(type_code, type_size, pad=False):
        # Factory evaluated at class-definition time: builds an unpacker
        # method for a fixed-size struct format (double, float, int32).
        # pylint: disable=protected-access, missing-docstring
        def unpack_type(self, size, offset):
            if not pad:
                self._verify_size(size, type_size)
            new_offset = offset + type_size
            packed_bytes = self._buffer[offset:new_offset]
            if pad:
                # Values stored in fewer bytes are left-padded with zeros.
                packed_bytes = packed_bytes.rjust(type_size, b'\x00')
            (value,) = struct.unpack(type_code, packed_bytes)
            return value, new_offset
        return unpack_type

    def _decode_map(self, size, offset):
        # A map is `size` consecutive key/value pairs of encoded values.
        container = {}
        for _ in range(size):
            (key, offset) = self.decode(offset)
            (value, offset) = self.decode(offset)
            container[key] = value
        return container, offset

    # Bias added to a pointer value, keyed by pointer size in bytes.
    _pointer_value_offset = {
        1: 0,
        2: 2048,
        3: 526336,
        4: 0,
    }

    def _decode_pointer(self, size, offset):
        pointer_size = ((size >> 3) & 0x3) + 1
        new_offset = offset + pointer_size
        pointer_bytes = self._buffer[offset:new_offset]
        # For 1-3 byte pointers the low 3 bits of the ctrl byte form the
        # high bits of the pointer value; 4-byte pointers use them not at all.
        packed = pointer_bytes if pointer_size == 4 else struct.pack(
            b'!c', byte_from_int(size & 0x7)) + pointer_bytes
        unpacked = int_from_bytes(packed)
        pointer = unpacked + self._pointer_base + \
            self._pointer_value_offset[pointer_size]
        if self._pointer_test:
            return pointer, new_offset
        (value, _) = self.decode(pointer)
        return value, new_offset

    def _decode_uint(self, size, offset):
        new_offset = offset + size
        uint_bytes = self._buffer[offset:new_offset]
        return int_from_bytes(uint_bytes), new_offset

    def _decode_utf8_string(self, size, offset):
        new_offset = offset + size
        return self._buffer[offset:new_offset].decode('utf-8'), new_offset

    # Dispatch table keyed by the MaxMind DB type number.
    _type_decoder = {
        1: _decode_pointer,
        2: _decode_utf8_string,
        3: _decode_packed_type(b'!d', 8),  # double,
        4: _decode_bytes,
        5: _decode_uint,  # uint16
        6: _decode_uint,  # uint32
        7: _decode_map,
        8: _decode_packed_type(b'!i', 4, pad=True),  # int32
        9: _decode_uint,  # uint64
        10: _decode_uint,  # uint128
        11: _decode_array,
        14: _decode_boolean,
        15: _decode_packed_type(b'!f', 4),  # float,
    }

    def decode(self, offset):
        """Decode a section of the data section starting at offset

        Arguments:
        offset -- the location of the data structure to decode
        """
        new_offset = offset + 1
        (ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset])
        type_num = ctrl_byte >> 5
        # Extended type
        if not type_num:
            (type_num, new_offset) = self._read_extended(new_offset)

        if type_num not in self._type_decoder:
            raise InvalidDatabaseError('Unexpected type number ({type}) '
                                       'encountered'.format(type=type_num))

        (size, new_offset) = self._size_from_ctrl_byte(
            ctrl_byte, new_offset, type_num)
        return self._type_decoder[type_num](self, size, new_offset)

    def _read_extended(self, offset):
        (next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1])
        type_num = next_byte + 7
        if type_num < 7:
            raise InvalidDatabaseError(
                'Something went horribly wrong in the decoder. An '
                'extended type resolved to a type number < 8 '
                '({type})'.format(type=type_num))
        return type_num, offset + 1

    def _verify_size(self, expected, actual):
        if expected != actual:
            raise InvalidDatabaseError(
                'The MaxMind DB file\'s data section contains bad data '
                '(unknown data type or corrupt data)'
            )

    def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num):
        # The low 5 bits of the ctrl byte encode the payload size; values
        # 29-31 mean the size continues in 1, 2 or 3 following bytes.
        size = ctrl_byte & 0x1f
        if type_num == 1:
            # Pointers encode size information differently (see
            # _decode_pointer).
            return size, offset
        bytes_to_read = 0 if size < 29 else size - 28

        new_offset = offset + bytes_to_read
        size_bytes = self._buffer[offset:new_offset]

        # Using unpack rather than int_from_bytes as it is about 200 lookups
        # per second faster here.
        if size == 29:
            size = 29 + struct.unpack(b'!B', size_bytes)[0]
        elif size == 30:
            size = 285 + struct.unpack(b'!H', size_bytes)[0]
        elif size > 30:
            size = struct.unpack(
                b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821

        return size, new_offset
diff --git a/lib/maxminddb/errors.py b/lib/maxminddb/errors.py
new file mode 100644
index 00000000..f04ff028
--- /dev/null
+++ b/lib/maxminddb/errors.py
@@ -0,0 +1,11 @@
+"""
+maxminddb.errors
+~~~~~~~~~~~~~~~~
+
+This module contains custom errors for the MaxMind DB reader
+"""
+
+
class InvalidDatabaseError(RuntimeError):
    """Raised when the MaxMind DB file contains data that cannot be decoded."""
diff --git a/lib/maxminddb/extension/maxminddb.c b/lib/maxminddb/extension/maxminddb.c
new file mode 100644
index 00000000..9e4d45e2
--- /dev/null
+++ b/lib/maxminddb/extension/maxminddb.c
@@ -0,0 +1,570 @@
#include <Python.h>
#include <maxminddb.h>
#include "structmember.h"

#define __STDC_FORMAT_MACROS
#include <inttypes.h>
+
+static PyTypeObject Reader_Type;
+static PyTypeObject Metadata_Type;
+static PyObject *MaxMindDB_error;
+
+typedef struct {
+ PyObject_HEAD /* no semicolon */
+ MMDB_s *mmdb;
+} Reader_obj;
+
+typedef struct {
+ PyObject_HEAD /* no semicolon */
+ PyObject *binary_format_major_version;
+ PyObject *binary_format_minor_version;
+ PyObject *build_epoch;
+ PyObject *database_type;
+ PyObject *description;
+ PyObject *ip_version;
+ PyObject *languages;
+ PyObject *node_count;
+ PyObject *record_size;
+} Metadata_obj;
+
+static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list);
+static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list);
+static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list);
+static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list);
+
+#if PY_MAJOR_VERSION >= 3
+ #define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void)
+ #define RETURN_MOD_INIT(m) return (m)
+ #define FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError
+#else
+ #define MOD_INIT(name) PyMODINIT_FUNC init ## name(void)
+ #define RETURN_MOD_INIT(m) return
+ #define PyInt_FromLong PyLong_FromLong
+ #define FILE_NOT_FOUND_ERROR PyExc_IOError
+#endif
+
+#ifdef __GNUC__
+ # define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
+#else
+ # define UNUSED(x) UNUSED_ ## x
+#endif
+
/* Reader.__init__(database, mode=0)
 *
 * Opens `database` via MMDB_open and stores the handle on the object.
 * Only MODE_AUTO (0) and MODE_MMAP_EXT (1) are accepted.
 * Returns 0 on success, -1 with a Python exception set on failure.
 */
static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    char *filename;
    int mode = 0;

    static char *kwlist[] = {"database", "mode", NULL};
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) {
        return -1;
    }

    if (mode != 0 && mode != 1) {
        PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). Only "
                     "MODE_AUTO and MODE_MMAP_EXT are supported by this extension.",
                     mode);
        return -1;
    }

    /* Check readability up front so a missing file raises the same
     * exception type (FileNotFoundError / IOError) as the pure Python
     * reader would. */
    if (0 != access(filename, R_OK)) {
        PyErr_Format(FILE_NOT_FOUND_ERROR,
                     "No such file or directory: '%s'",
                     filename);
        return -1;
    }

    MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s));
    if (NULL == mmdb) {
        PyErr_NoMemory();
        return -1;
    }

    Reader_obj *mmdb_obj = (Reader_obj *)self;
    /* NOTE(review): `self` cannot be NULL when tp_init runs, so this check
     * appears to be dead code; kept as harmless defensiveness. */
    if (!mmdb_obj) {
        free(mmdb);
        PyErr_NoMemory();
        return -1;
    }

    uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb);

    if (MMDB_SUCCESS != status) {
        free(mmdb);
        PyErr_Format(
            MaxMindDB_error,
            "Error opening database file (%s). Is this a valid MaxMind DB file?",
            filename
            );
        return -1;
    }

    mmdb_obj->mmdb = mmdb;
    return 0;
}
+
/* Reader.get(ip_address) -> decoded record, or None when no entry exists.
 *
 * `ip_address` must be a string; ValueError is raised for unparseable
 * addresses and for IPv6 lookups in an IPv4-only database.
 */
static PyObject *Reader_get(PyObject *self, PyObject *args)
{
    char *ip_address = NULL;

    Reader_obj *mmdb_obj = (Reader_obj *)self;
    if (!PyArg_ParseTuple(args, "s", &ip_address)) {
        return NULL;
    }

    MMDB_s *mmdb = mmdb_obj->mmdb;

    if (NULL == mmdb) {
        PyErr_SetString(PyExc_ValueError,
                        "Attempt to read from a closed MaxMind DB.");
        return NULL;
    }

    int gai_error = 0;
    int mmdb_error = MMDB_SUCCESS;
    MMDB_lookup_result_s result =
        MMDB_lookup_string(mmdb, ip_address, &gai_error,
                           &mmdb_error);

    /* getaddrinfo failure: the string was not a parseable IP address. */
    if (0 != gai_error) {
        PyErr_Format(PyExc_ValueError,
                     "'%s' does not appear to be an IPv4 or IPv6 address.",
                     ip_address);
        return NULL;
    }

    if (MMDB_SUCCESS != mmdb_error) {
        PyObject *exception;
        if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) {
            exception = PyExc_ValueError;
        } else {
            exception = MaxMindDB_error;
        }
        PyErr_Format(exception, "Error looking up %s. %s",
                     ip_address, MMDB_strerror(mmdb_error));
        return NULL;
    }

    if (!result.found_entry) {
        Py_RETURN_NONE;
    }

    MMDB_entry_data_list_s *entry_data_list = NULL;
    int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list);
    if (MMDB_SUCCESS != status) {
        PyErr_Format(MaxMindDB_error,
                     "Error while looking up data for %s. %s",
                     ip_address, MMDB_strerror(status));
        MMDB_free_entry_data_list(entry_data_list);
        return NULL;
    }

    /* from_entry_data_list advances the list pointer while decoding, so
     * keep the original head around for freeing. */
    MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
    PyObject *py_obj = from_entry_data_list(&entry_data_list);
    MMDB_free_entry_data_list(original_entry_data_list);
    return py_obj;
}
+
+static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args))
+{
+ Reader_obj *mmdb_obj = (Reader_obj *)self;
+
+ if (NULL == mmdb_obj->mmdb) {
+ PyErr_SetString(PyExc_IOError,
+ "Attempt to read from a closed MaxMind DB.");
+ return NULL;
+ }
+
+ MMDB_entry_data_list_s *entry_data_list;
+ MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list);
+ MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
+
+ PyObject *metadata_dict = from_entry_data_list(&entry_data_list);
+ MMDB_free_entry_data_list(original_entry_data_list);
+ if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) {
+ PyErr_SetString(MaxMindDB_error,
+ "Error decoding metadata.");
+ return NULL;
+ }
+
+ PyObject *args = PyTuple_New(0);
+ if (NULL == args) {
+ Py_DECREF(metadata_dict);
+ return NULL;
+ }
+
+ PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args,
+ metadata_dict);
+
+ Py_DECREF(metadata_dict);
+ return metadata;
+}
+
+static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args))
+{
+ Reader_obj *mmdb_obj = (Reader_obj *)self;
+
+ if (NULL != mmdb_obj->mmdb) {
+ MMDB_close(mmdb_obj->mmdb);
+ free(mmdb_obj->mmdb);
+ mmdb_obj->mmdb = NULL;
+ }
+
+ Py_RETURN_NONE;
+}
+
/* Deallocate a Reader, closing the database first if it is still open. */
static void Reader_dealloc(PyObject *self)
{
    Reader_obj *obj = (Reader_obj *)self;
    if (NULL != obj->mmdb) {
        Reader_close(self, NULL);
    }

    PyObject_Del(self);
}
+
+static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds)
+{
+
+ PyObject
+ *binary_format_major_version,
+ *binary_format_minor_version,
+ *build_epoch,
+ *database_type,
+ *description,
+ *ip_version,
+ *languages,
+ *node_count,
+ *record_size;
+
+ static char *kwlist[] = {
+ "binary_format_major_version",
+ "binary_format_minor_version",
+ "build_epoch",
+ "database_type",
+ "description",
+ "ip_version",
+ "languages",
+ "node_count",
+ "record_size",
+ NULL
+ };
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist,
+ &binary_format_major_version,
+ &binary_format_minor_version,
+ &build_epoch,
+ &database_type,
+ &description,
+ &ip_version,
+ &languages,
+ &node_count,
+ &record_size)) {
+ return -1;
+ }
+
+ Metadata_obj *obj = (Metadata_obj *)self;
+
+ obj->binary_format_major_version = binary_format_major_version;
+ obj->binary_format_minor_version = binary_format_minor_version;
+ obj->build_epoch = build_epoch;
+ obj->database_type = database_type;
+ obj->description = description;
+ obj->ip_version = ip_version;
+ obj->languages = languages;
+ obj->node_count = node_count;
+ obj->record_size = record_size;
+
+ Py_INCREF(obj->binary_format_major_version);
+ Py_INCREF(obj->binary_format_minor_version);
+ Py_INCREF(obj->build_epoch);
+ Py_INCREF(obj->database_type);
+ Py_INCREF(obj->description);
+ Py_INCREF(obj->ip_version);
+ Py_INCREF(obj->languages);
+ Py_INCREF(obj->node_count);
+ Py_INCREF(obj->record_size);
+
+ return 0;
+}
+
+static void Metadata_dealloc(PyObject *self)
+{
+ Metadata_obj *obj = (Metadata_obj *)self;
+ Py_DECREF(obj->binary_format_major_version);
+ Py_DECREF(obj->binary_format_minor_version);
+ Py_DECREF(obj->build_epoch);
+ Py_DECREF(obj->database_type);
+ Py_DECREF(obj->description);
+ Py_DECREF(obj->ip_version);
+ Py_DECREF(obj->languages);
+ Py_DECREF(obj->node_count);
+ Py_DECREF(obj->record_size);
+ PyObject_Del(self);
+}
+
/* Convert the current entry of a decoded data list into a Python object,
 * advancing *entry_data_list past everything consumed (maps and arrays
 * recurse). Returns a new reference, or NULL with an exception set.
 */
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list)
{
    if (NULL == entry_data_list || NULL == *entry_data_list) {
        PyErr_SetString(
            MaxMindDB_error,
            "Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb."
            );
        return NULL;
    }

    switch ((*entry_data_list)->entry_data.type) {
    case MMDB_DATA_TYPE_MAP:
        return from_map(entry_data_list);
    case MMDB_DATA_TYPE_ARRAY:
        return from_array(entry_data_list);
    case MMDB_DATA_TYPE_UTF8_STRING:
        return PyUnicode_FromStringAndSize(
                   (*entry_data_list)->entry_data.utf8_string,
                   (*entry_data_list)->entry_data.data_size
                   );
    case MMDB_DATA_TYPE_BYTES:
        return PyByteArray_FromStringAndSize(
                   (const char *)(*entry_data_list)->entry_data.bytes,
                   (Py_ssize_t)(*entry_data_list)->entry_data.data_size);
    case MMDB_DATA_TYPE_DOUBLE:
        return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value);
    case MMDB_DATA_TYPE_FLOAT:
        return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value);
    case MMDB_DATA_TYPE_UINT16:
        return PyLong_FromLong( (*entry_data_list)->entry_data.uint16);
    case MMDB_DATA_TYPE_UINT32:
        return PyLong_FromLong((*entry_data_list)->entry_data.uint32);
    case MMDB_DATA_TYPE_BOOLEAN:
        return PyBool_FromLong((*entry_data_list)->entry_data.boolean);
    case MMDB_DATA_TYPE_UINT64:
        return PyLong_FromUnsignedLongLong(
                   (*entry_data_list)->entry_data.uint64);
    case MMDB_DATA_TYPE_UINT128:
        /* 128-bit ints have no direct C API constructor; see from_uint128. */
        return from_uint128(*entry_data_list);
    case MMDB_DATA_TYPE_INT32:
        return PyLong_FromLong((*entry_data_list)->entry_data.int32);
    default:
        PyErr_Format(MaxMindDB_error,
                     "Invalid data type arguments: %d",
                     (*entry_data_list)->entry_data.type);
        return NULL;
    }
    return NULL;
}
+
+static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list)
+{
+ PyObject *py_obj = PyDict_New();
+ if (NULL == py_obj) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ const uint32_t map_size = (*entry_data_list)->entry_data.data_size;
+
+ uint i;
+ // entry_data_list cannot start out NULL (see from_entry_data_list). We
+ // check it in the loop because it may become NULL.
+ // coverity[check_after_deref]
+ for (i = 0; i < map_size && entry_data_list; i++) {
+ *entry_data_list = (*entry_data_list)->next;
+
+ PyObject *key = PyUnicode_FromStringAndSize(
+ (char *)(*entry_data_list)->entry_data.utf8_string,
+ (*entry_data_list)->entry_data.data_size
+ );
+
+ *entry_data_list = (*entry_data_list)->next;
+
+ PyObject *value = from_entry_data_list(entry_data_list);
+ if (NULL == value) {
+ Py_DECREF(key);
+ Py_DECREF(py_obj);
+ return NULL;
+ }
+ PyDict_SetItem(py_obj, key, value);
+ Py_DECREF(value);
+ Py_DECREF(key);
+ }
+
+ return py_obj;
+}
+
+static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list)
+{
+ const uint32_t size = (*entry_data_list)->entry_data.data_size;
+
+ PyObject *py_obj = PyList_New(size);
+ if (NULL == py_obj) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ uint i;
+ // entry_data_list cannot start out NULL (see from_entry_data_list). We
+ // check it in the loop because it may become NULL.
+ // coverity[check_after_deref]
+ for (i = 0; i < size && entry_data_list; i++) {
+ *entry_data_list = (*entry_data_list)->next;
+ PyObject *value = from_entry_data_list(entry_data_list);
+ if (NULL == value) {
+ Py_DECREF(py_obj);
+ return NULL;
+ }
+ // PyList_SetItem 'steals' the reference
+ PyList_SetItem(py_obj, i, value);
+ }
+ return py_obj;
+}
+
+static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list)
+{
+ uint64_t high = 0;
+ uint64_t low = 0;
+#if MMDB_UINT128_IS_BYTE_ARRAY
+ int i;
+ for (i = 0; i < 8; i++) {
+ high = (high << 8) | entry_data_list->entry_data.uint128[i];
+ }
+
+ for (i = 8; i < 16; i++) {
+ low = (low << 8) | entry_data_list->entry_data.uint128[i];
+ }
+#else
+ high = entry_data_list->entry_data.uint128 >> 64;
+ low = (uint64_t)entry_data_list->entry_data.uint128;
+#endif
+
+ char *num_str = malloc(33);
+ if (NULL == num_str) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low);
+
+ PyObject *py_obj = PyLong_FromString(num_str, NULL, 16);
+
+ free(num_str);
+ return py_obj;
+}
+
/* Methods exposed on the Reader type. */
static PyMethodDef Reader_methods[] = {
    { "get", Reader_get, METH_VARARGS,
      "Get record for IP address" },
    { "metadata", Reader_metadata, METH_NOARGS,
      "Returns metadata object for database" },
    { "close", Reader_close, METH_NOARGS, "Closes database"},
    { NULL, NULL, 0, NULL }
};

static PyTypeObject Reader_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_basicsize = sizeof(Reader_obj),
    .tp_dealloc = Reader_dealloc,
    .tp_doc = "Reader object",
    .tp_flags = Py_TPFLAGS_DEFAULT,
    .tp_methods = Reader_methods,
    .tp_name = "Reader",
    .tp_init = Reader_init,
};

/* Metadata exposes no methods; its data is reachable via the read-only
 * members below. */
static PyMethodDef Metadata_methods[] = {
    { NULL, NULL, 0, NULL }
};

/* *INDENT-OFF* */
/* Read-only attributes mirroring the fields stored by Metadata_init. */
static PyMemberDef Metadata_members[] = {
    { "binary_format_major_version", T_OBJECT, offsetof(
          Metadata_obj, binary_format_major_version), READONLY, NULL },
    { "binary_format_minor_version", T_OBJECT, offsetof(
          Metadata_obj, binary_format_minor_version), READONLY, NULL },
    { "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch),
      READONLY, NULL },
    { "database_type", T_OBJECT, offsetof(Metadata_obj, database_type),
      READONLY, NULL },
    { "description", T_OBJECT, offsetof(Metadata_obj, description),
      READONLY, NULL },
    { "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version),
      READONLY, NULL },
    { "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY,
      NULL },
    { "node_count", T_OBJECT, offsetof(Metadata_obj, node_count),
      READONLY, NULL },
    { "record_size", T_OBJECT, offsetof(Metadata_obj, record_size),
      READONLY, NULL },
    { NULL, 0, 0, 0, NULL }
};
/* *INDENT-ON* */

static PyTypeObject Metadata_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_basicsize = sizeof(Metadata_obj),
    .tp_dealloc = Metadata_dealloc,
    .tp_doc = "Metadata object",
    .tp_flags = Py_TPFLAGS_DEFAULT,
    .tp_members = Metadata_members,
    .tp_methods = Metadata_methods,
    .tp_name = "Metadata",
    .tp_init = Metadata_init
};

/* No module-level functions; everything lives on the two types. */
static PyMethodDef MaxMindDB_methods[] = {
    { NULL, NULL, 0, NULL }
};


#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef MaxMindDB_module = {
    PyModuleDef_HEAD_INIT,
    .m_name = "extension",
    .m_doc = "This is a C extension to read MaxMind DB file format",
    .m_methods = MaxMindDB_methods,
};
#endif
+
+MOD_INIT(extension){
+ PyObject *m;
+
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&MaxMindDB_module);
+#else
+ m = Py_InitModule("extension", MaxMindDB_methods);
+#endif
+
+ if (!m) {
+ RETURN_MOD_INIT(NULL);
+ }
+
+ Reader_Type.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&Reader_Type)) {
+ RETURN_MOD_INIT(NULL);
+ }
+ Py_INCREF(&Reader_Type);
+ PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type);
+
+ Metadata_Type.tp_new = PyType_GenericNew;
+ if (PyType_Ready(&Metadata_Type)) {
+ RETURN_MOD_INIT(NULL);
+ }
+ PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type);
+
+ PyObject* error_mod = PyImport_ImportModule("maxminddb.errors");
+ if (error_mod == NULL) {
+ RETURN_MOD_INIT(NULL);
+ }
+
+ MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError");
+ Py_DECREF(error_mod);
+
+ if (MaxMindDB_error == NULL) {
+ RETURN_MOD_INIT(NULL);
+ }
+
+ Py_INCREF(MaxMindDB_error);
+
+ /* We primarily add it to the module for backwards compatibility */
+ PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error);
+
+ RETURN_MOD_INIT(m);
+}
diff --git a/lib/maxminddb/file.py b/lib/maxminddb/file.py
new file mode 100644
index 00000000..2e01e756
--- /dev/null
+++ b/lib/maxminddb/file.py
@@ -0,0 +1,66 @@
+"""For internal use only. It provides a slice-like file reader."""
+
+import os
+
+try:
+ # pylint: disable=no-name-in-module
+ from multiprocessing import Lock
+except ImportError:
+ from threading import Lock
+
+
class FileBuffer(object):

    """A slice-able file reader.

    Provides read-only, random-access slicing over a file on disk without
    loading it into memory. On platforms that have ``os.pread`` reads are
    positionless (fork-safe); elsewhere a lock serializes the seek+read
    pair on the shared file offset.
    """

    def __init__(self, database):
        """Open *database* (a file path) for binary random-access reads."""
        self._handle = open(database, 'rb')
        self._size = os.fstat(self._handle.fileno()).st_size
        if not hasattr(os, 'pread'):
            # Without pread, reads mutate the shared file offset; guard
            # the seek+read pair with a lock (see _read below).
            self._lock = Lock()

    def __getitem__(self, key):
        """Return bytes for a slice, or a single byte for an int index."""
        if isinstance(key, slice):
            return self._read(key.stop - key.start, key.start)
        elif isinstance(key, int):
            return self._read(1, key)
        else:
            raise TypeError("Invalid argument type.")

    def rfind(self, needle, start):
        """Reverse find needle from start"""
        # Search everything from `start` through the end of the file.
        # (This previously read `self._size - start - 1` bytes, one too
        # few, so a needle ending on the file's final byte was missed.)
        pos = self._read(self._size - start, start).rfind(needle)
        if pos == -1:
            return pos
        return start + pos

    def size(self):
        """Size of file"""
        return self._size

    def close(self):
        """Close file"""
        self._handle.close()

    if hasattr(os, 'pread'):

        def _read(self, buffersize, offset):
            """read that uses pread"""
            # pylint: disable=no-member
            return os.pread(self._handle.fileno(), buffersize, offset)

    else:

        def _read(self, buffersize, offset):
            """read with a lock

            This lock is necessary as after a fork, the different processes
            will share the same file table entry, even if we dup the fd, and
            as such the same offsets. There does not appear to be a way to
            duplicate the file table entry and we cannot re-open based on the
            original path as that file may have replaced with another or
            unlinked.
            """
            with self._lock:
                self._handle.seek(offset)
                return self._handle.read(buffersize)
diff --git a/lib/maxminddb/reader.py b/lib/maxminddb/reader.py
new file mode 100644
index 00000000..b45f31e2
--- /dev/null
+++ b/lib/maxminddb/reader.py
@@ -0,0 +1,223 @@
+"""
+maxminddb.reader
+~~~~~~~~~~~~~~~~
+
+This module contains the pure Python database reader and related classes.
+
+"""
+from __future__ import unicode_literals
+
+try:
+ import mmap
+except ImportError:
+ # pylint: disable=invalid-name
+ mmap = None
+
+import struct
+
+from maxminddb.compat import byte_from_int, int_from_byte, compat_ip_address
+from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY
+from maxminddb.decoder import Decoder
+from maxminddb.errors import InvalidDatabaseError
+from maxminddb.file import FileBuffer
+
+
class Reader(object):

    """
    Instances of this class provide a reader for the MaxMind DB format. IP
    addresses can be looked up using the ``get`` method.
    """

    # Number of null bytes separating the search tree from the data section.
    _DATA_SECTION_SEPARATOR_SIZE = 16
    # Marker immediately preceding the metadata block at the end of the file.
    _METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com"

    # Cached node index where IPv4 lookups start inside an IPv6 tree
    # (computed lazily by _start_node; None until first use).
    _ipv4_start = None

    def __init__(self, database, mode=MODE_AUTO):
        """Reader for the MaxMind DB file format

        Arguments:
        database -- A path to a valid MaxMind DB file such as a GeoIP2
                    database file.
        mode -- mode to open the database with. Valid mode are:
                * MODE_MMAP - read from memory map.
                * MODE_FILE - read database as standard file.
                * MODE_MEMORY - load database into memory.
                * MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default.
        """
        # pylint: disable=redefined-variable-type
        # Select a buffer backend; all three expose slicing and rfind().
        if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP:
            with open(database, 'rb') as db_file:
                self._buffer = mmap.mmap(
                    db_file.fileno(), 0, access=mmap.ACCESS_READ)
                self._buffer_size = self._buffer.size()
        elif mode in (MODE_AUTO, MODE_FILE):
            self._buffer = FileBuffer(database)
            self._buffer_size = self._buffer.size()
        elif mode == MODE_MEMORY:
            with open(database, 'rb') as db_file:
                self._buffer = db_file.read()
                self._buffer_size = len(self._buffer)
        else:
            raise ValueError('Unsupported open mode ({0}). Only MODE_AUTO, '
                             ' MODE_FILE, and MODE_MEMORY are support by the pure Python '
                             'Reader'.format(mode))

        # The metadata marker is only searched for within the last 128 KiB
        # of the file.
        metadata_start = self._buffer.rfind(self._METADATA_START_MARKER,
                                            max(0, self._buffer_size
                                                - 128 * 1024))

        if metadata_start == -1:
            self.close()
            raise InvalidDatabaseError('Error opening database file ({0}). '
                                       'Is this a valid MaxMind DB file?'
                                       ''.format(database))

        # The metadata map starts right after the marker.
        metadata_start += len(self._METADATA_START_MARKER)
        metadata_decoder = Decoder(self._buffer, metadata_start)
        (metadata, _) = metadata_decoder.decode(metadata_start)
        self._metadata = Metadata(
            **metadata)  # pylint: disable=bad-option-value

        # Data-section decoder: offsets are relative to the end of the
        # search tree plus the 16-byte separator.
        self._decoder = Decoder(self._buffer, self._metadata.search_tree_size
                                + self._DATA_SECTION_SEPARATOR_SIZE)

    def metadata(self):
        """Return the metadata associated with the MaxMind DB file"""
        return self._metadata

    def get(self, ip_address):
        """Return the record for the ip_address in the MaxMind DB


        Arguments:
        ip_address -- an IP address in the standard string notation
        """

        address = compat_ip_address(ip_address)

        if address.version == 6 and self._metadata.ip_version == 4:
            raise ValueError('Error looking up {0}. You attempted to look up '
                             'an IPv6 address in an IPv4-only database.'.format(
                                 ip_address))
        pointer = self._find_address_in_tree(address)

        # A pointer of 0 means "no record for this address".
        return self._resolve_data_pointer(pointer) if pointer else None

    def _find_address_in_tree(self, ip_address):
        """Walk the binary search tree bit-by-bit (MSB first) and return a
        data-section pointer, or 0 when the address has no record."""
        packed = ip_address.packed

        bit_count = len(packed) * 8
        node = self._start_node(bit_count)

        for i in range(bit_count):
            if node >= self._metadata.node_count:
                break
            # Extract bit i of the packed address, most significant first.
            bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8))
            node = self._read_node(node, bit)
        if node == self._metadata.node_count:
            # Record is empty
            return 0
        elif node > self._metadata.node_count:
            # Values above node_count are pointers into the data section.
            return node

        raise InvalidDatabaseError('Invalid node in search tree')

    def _start_node(self, length):
        """Return the tree node at which lookup should begin.

        For an IPv4 address in an IPv6 tree the first 96 bits are zero, so
        the corresponding 96 node hops are skipped (and cached)."""
        if self._metadata.ip_version != 6 or length == 128:
            return 0

        # We are looking up an IPv4 address in an IPv6 tree. Skip over the
        # first 96 nodes.
        if self._ipv4_start:
            return self._ipv4_start

        node = 0
        for _ in range(96):
            if node >= self._metadata.node_count:
                break
            node = self._read_node(node, 0)
        # Cache for subsequent IPv4 lookups.
        self._ipv4_start = node
        return node

    def _read_node(self, node_number, index):
        """Return record `index` (0 = left, 1 = right) of the given node,
        decoding the 24-, 28-, or 32-bit record layout."""
        base_offset = node_number * self._metadata.node_byte_size

        record_size = self._metadata.record_size
        if record_size == 24:
            offset = base_offset + index * 3
            node_bytes = b'\x00' + self._buffer[offset:offset + 3]
        elif record_size == 28:
            # The middle byte holds the high nibble of both records:
            # high 4 bits belong to the left record, low 4 to the right.
            (middle,) = struct.unpack(
                b'!B', self._buffer[base_offset + 3:base_offset + 4])
            if index:
                middle &= 0x0F
            else:
                middle = (0xF0 & middle) >> 4
            offset = base_offset + index * 4
            node_bytes = byte_from_int(
                middle) + self._buffer[offset:offset + 3]
        elif record_size == 32:
            offset = base_offset + index * 4
            node_bytes = self._buffer[offset:offset + 4]
        else:
            raise InvalidDatabaseError(
                'Unknown record size: {0}'.format(record_size))
        # Records are big-endian unsigned integers.
        return struct.unpack(b'!I', node_bytes)[0]

    def _resolve_data_pointer(self, pointer):
        """Translate a search-tree pointer into a file offset and decode
        the record stored there."""
        resolved = pointer - self._metadata.node_count + \
            self._metadata.search_tree_size

        if resolved > self._buffer_size:
            raise InvalidDatabaseError(
                "The MaxMind DB file's search tree is corrupt")

        (data, _) = self._decoder.decode(resolved)
        return data

    def close(self):
        """Closes the MaxMind DB file and returns the resources to the system"""
        # pylint: disable=unidiomatic-typecheck
        # MODE_MEMORY keeps the whole database as a str/bytes object, which
        # has nothing to close; mmap and FileBuffer do.
        if type(self._buffer) not in (str, bytes):
            self._buffer.close()
+
+
class Metadata(object):

    """Metadata for the MaxMind DB reader.

    Exposes the spec-defined metadata fields (node_count, record_size,
    ip_version, database_type, languages, binary format versions,
    build_epoch, description) as plain instance attributes, plus derived
    sizes used by the search-tree walker.
    """

    # pylint: disable=too-many-instance-attributes
    def __init__(self, **kwargs):
        """Creates new Metadata object. kwargs are key/value pairs from spec"""
        # Copy each required spec field onto the instance. The order here
        # fixes the attribute order reported by __repr__.
        for field in ('node_count', 'record_size', 'ip_version',
                      'database_type', 'languages',
                      'binary_format_major_version',
                      'binary_format_minor_version', 'build_epoch',
                      'description'):
            setattr(self, field, kwargs[field])

    @property
    def node_byte_size(self):
        """The size of a node in bytes"""
        # A node is two records of record_size bits: 2 * (bits / 8).
        return self.record_size // 4

    @property
    def search_tree_size(self):
        """The size of the search tree"""
        return self.node_byte_size * self.node_count

    def __repr__(self):
        pairs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s.%s(%s)' % (
            self.__module__, self.__class__.__name__, pairs)
diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py
index 08bb2327..708eb0df 100644
--- a/plexpy/activity_handler.py
+++ b/plexpy/activity_handler.py
@@ -18,6 +18,7 @@ import time
import plexpy
import activity_processor
+import datafactory
import helpers
import logger
import notification_handler
@@ -55,23 +56,46 @@ class ActivityHandler(object):
return None
- def update_db_session(self):
+ def update_db_session(self, session=None):
# Update our session temp table values
monitor_proc = activity_processor.ActivityProcessor()
- monitor_proc.write_session(session=self.get_live_session(), notify=False)
+ monitor_proc.write_session(session=session, notify=False)
def on_start(self):
if self.is_valid_session() and self.get_live_session():
logger.debug(u"PlexPy ActivityHandler :: Session %s has started." % str(self.get_session_key()))
+ session = self.get_live_session()
+
# Check if any notification agents have notifications enabled
if any(d['on_play'] for d in notifiers.available_notification_agents()):
# Fire off notifications
threading.Thread(target=notification_handler.notify,
- kwargs=dict(stream_data=self.get_live_session(), notify_action='play')).start()
+ kwargs=dict(stream_data=session, notify_action='play')).start()
# Write the new session to our temp session table
- self.update_db_session()
+ self.update_db_session(session=session)
+
+ # Check if any notification agents have notifications enabled
+ if any(d['on_concurrent'] for d in notifiers.available_notification_agents()):
+ # Check if any concurrent streams by the user
+ ip = True if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP else None
+ ap = activity_processor.ActivityProcessor()
+ user_sessions = ap.get_session_by_user_id(user_id=session['user_id'], ip_address=ip)
+ if len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD:
+ # Push any notifications - Push it on its own thread so we don't hold up our db actions
+ threading.Thread(target=notification_handler.notify,
+ kwargs=dict(stream_data=session, notify_action='concurrent')).start()
+
+ # Check if any notification agents have notifications enabled
+ if any(d['on_newdevice'] for d in notifiers.available_notification_agents()):
+ # Check if the user is streaming from a new device
+ data_factory = datafactory.DataFactory()
+ user_devices = data_factory.get_user_devices(user_id=session['user_id'])
+ if session['machine_id'] not in user_devices:
+ # Push any notifications - Push it on its own thread so we don't hold up our db actions
+ threading.Thread(target=notification_handler.notify,
+ kwargs=dict(stream_data=session, notify_action='newdevice')).start()
def on_stop(self, force_stop=False):
if self.is_valid_session():
diff --git a/plexpy/activity_pinger.py b/plexpy/activity_pinger.py
index ea6449c0..8ab3bf31 100644
--- a/plexpy/activity_pinger.py
+++ b/plexpy/activity_pinger.py
@@ -24,6 +24,7 @@ import libraries
import logger
import notification_handler
import notifiers
+import plextv
import pmsconnect
@@ -372,23 +373,19 @@ def check_server_updates():
with monitor_lock:
logger.info(u"PlexPy Monitor :: Checking for PMS updates...")
- pms_connect = pmsconnect.PmsConnect()
+ plex_tv = plextv.PlexTV()
+ download_info = plex_tv.get_plex_downloads()
- server_identity = pms_connect.get_server_identity()
- update_status = pms_connect.get_update_staus()
+ if download_info:
+ logger.info(u"PlexPy Monitor :: Current PMS version: %s", plexpy.CONFIG.PMS_VERSION)
- if server_identity and update_status:
- version = server_identity['version']
- logger.info(u"PlexPy Monitor :: Current PMS version: %s", version)
-
- if update_status['state'] == 'available':
- update_version = update_status['version']
- logger.info(u"PlexPy Monitor :: PMS update available version: %s", update_version)
+ if download_info['update_available']:
+ logger.info(u"PlexPy Monitor :: PMS update available version: %s", download_info['version'])
# Check if any notification agents have notifications enabled
if any(d['on_pmsupdate'] for d in notifiers.available_notification_agents()):
# Fire off notifications
threading.Thread(target=notification_handler.notify_timeline,
- kwargs=dict(notify_action='pmsupdate')).start()
+ kwargs=dict(notify_action='pmsupdate')).start()
else:
logger.info(u"PlexPy Monitor :: No PMS update available.")
\ No newline at end of file
diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py
index 1687d098..f86c781f 100644
--- a/plexpy/activity_processor.py
+++ b/plexpy/activity_processor.py
@@ -19,6 +19,7 @@ import re
import plexpy
import database
+import datafactory
import libraries
import log_reader
import logger
@@ -106,6 +107,26 @@ class ActivityProcessor(object):
ip_address = {'ip_address': ip_address}
self.db.upsert('sessions', ip_address, keys)
+ # Check if any notification agents have notifications enabled
+ if notify and any(d['on_concurrent'] for d in notifiers.available_notification_agents()):
+ # Check if any concurrent streams by the user
+ ip = True if plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP else None
+ user_sessions = self.get_session_by_user_id(user_id=session['user_id'], ip_address=ip)
+ if len(user_sessions) >= plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD:
+ # Push any notifications - Push it on its own thread so we don't hold up our db actions
+ threading.Thread(target=notification_handler.notify,
+ kwargs=dict(stream_data=values, notify_action='concurrent')).start()
+
+ # Check if any notification agents have notifications enabled
+ if notify and any(d['on_newdevice'] for d in notifiers.available_notification_agents()):
+ # Check if the user is streaming from a new device
+ data_factory = datafactory.DataFactory()
+ user_devices = data_factory.get_user_devices(user_id=session['user_id'])
+ if session['machine_id'] not in user_devices:
+ # Push any notifications - Push it on its own thread so we don't hold up our db actions
+ threading.Thread(target=notification_handler.notify,
+ kwargs=dict(stream_data=values, notify_action='newdevice')).start()
+
return True
def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
@@ -470,3 +491,13 @@ class ActivityProcessor(object):
return last_time['buffer_last_triggered']
return None
+
+ def get_session_by_user_id(self, user_id=None, ip_address=None):
+ sessions = []
+ if str(user_id).isdigit():
+ ip = 'GROUP BY ip_address' if ip_address else ''
+ sessions = self.db.select('SELECT * '
+ 'FROM sessions '
+ 'WHERE user_id = ? %s' % ip,
+ [user_id])
+ return sessions
\ No newline at end of file
diff --git a/plexpy/config.py b/plexpy/config.py
index 92bc4ee0..7734c97a 100644
--- a/plexpy/config.py
+++ b/plexpy/config.py
@@ -55,6 +55,11 @@ _CONFIG_DEFINITIONS = {
'PMS_USE_BIF': (int, 'PMS', 0),
'PMS_UUID': (str, 'PMS', ''),
'PMS_TIMEOUT': (int, 'Advanced', 15),
+ 'PMS_PLEXPASS': (int, 'PMS', 0),
+ 'PMS_PLATFORM': (str, 'PMS', ''),
+ 'PMS_VERSION': (str, 'PMS', ''),
+ 'PMS_UPDATE_CHANNEL': (str, 'PMS', 'public'),
+ 'PMS_UPDATE_DISTRO_BUILD': (str, 'PMS', ''),
'TIME_FORMAT': (str, 'General', 'HH:mm'),
'ANON_REDIRECT': (str, 'General', 'http://dereferer.org/?'),
'API_ENABLED': (int, 'General', 0),
@@ -75,6 +80,8 @@ _CONFIG_DEFINITIONS = {
'BOXCAR_ON_EXTUP': (int, 'Boxcar', 0),
'BOXCAR_ON_INTUP': (int, 'Boxcar', 0),
'BOXCAR_ON_PMSUPDATE': (int, 'Boxcar', 0),
+ 'BOXCAR_ON_CONCURRENT': (int, 'Boxcar', 0),
+ 'BOXCAR_ON_NEWDEVICE': (int, 'Boxcar', 0),
'BROWSER_ENABLED': (int, 'Boxcar', 0),
'BROWSER_AUTO_HIDE_DELAY': (int, 'Boxcar', 5),
'BROWSER_ON_PLAY': (int, 'BROWSER', 0),
@@ -89,6 +96,8 @@ _CONFIG_DEFINITIONS = {
'BROWSER_ON_EXTUP': (int, 'BROWSER', 0),
'BROWSER_ON_INTUP': (int, 'BROWSER', 0),
'BROWSER_ON_PMSUPDATE': (int, 'BROWSER', 0),
+ 'BROWSER_ON_CONCURRENT': (int, 'BROWSER', 0),
+ 'BROWSER_ON_NEWDEVICE': (int, 'BROWSER', 0),
'BUFFER_THRESHOLD': (int, 'Monitoring', 3),
'BUFFER_WAIT': (int, 'Monitoring', 900),
'BACKUP_DIR': (str, 'General', ''),
@@ -125,6 +134,8 @@ _CONFIG_DEFINITIONS = {
'EMAIL_ON_EXTUP': (int, 'Email', 0),
'EMAIL_ON_INTUP': (int, 'Email', 0),
'EMAIL_ON_PMSUPDATE': (int, 'Email', 0),
+ 'EMAIL_ON_CONCURRENT': (int, 'Email', 0),
+ 'EMAIL_ON_NEWDEVICE': (int, 'Email', 0),
'ENABLE_HTTPS': (int, 'General', 0),
'FACEBOOK_ENABLED': (int, 'Facebook', 0),
'FACEBOOK_REDIRECT_URI': (str, 'Facebook', ''),
@@ -147,8 +158,11 @@ _CONFIG_DEFINITIONS = {
'FACEBOOK_ON_EXTUP': (int, 'Facebook', 0),
'FACEBOOK_ON_INTUP': (int, 'Facebook', 0),
'FACEBOOK_ON_PMSUPDATE': (int, 'Facebook', 0),
+ 'FACEBOOK_ON_CONCURRENT': (int, 'Facebook', 0),
+ 'FACEBOOK_ON_NEWDEVICE': (int, 'Facebook', 0),
'FIRST_RUN_COMPLETE': (int, 'General', 0),
'FREEZE_DB': (int, 'General', 0),
+ 'GEOIP_DB': (str, 'General', ''),
'GET_FILE_SIZES': (int, 'General', 0),
'GET_FILE_SIZES_HOLD': (dict, 'General', {'section_ids': [], 'rating_keys': []}),
'GIT_BRANCH': (str, 'General', 'master'),
@@ -174,6 +188,8 @@ _CONFIG_DEFINITIONS = {
'GROWL_ON_EXTUP': (int, 'Growl', 0),
'GROWL_ON_INTUP': (int, 'Growl', 0),
'GROWL_ON_PMSUPDATE': (int, 'Growl', 0),
+ 'GROWL_ON_CONCURRENT': (int, 'Growl', 0),
+ 'GROWL_ON_NEWDEVICE': (int, 'Growl', 0),
'HOME_SECTIONS': (list, 'General', ['current_activity','watch_stats','library_stats','recently_added']),
'HOME_LIBRARY_CARDS': (list, 'General', ['first_run']),
'HOME_STATS_LENGTH': (int, 'General', 30),
@@ -196,6 +212,27 @@ _CONFIG_DEFINITIONS = {
'HTTP_PROXY': (int, 'General', 0),
'HTTP_ROOT': (str, 'General', ''),
'HTTP_USERNAME': (str, 'General', ''),
+ 'HIPCHAT_URL': (str, 'Hipchat', ''),
+ 'HIPCHAT_COLOR': (str, 'Hipchat', ''),
+ 'HIPCHAT_INCL_SUBJECT': (int, 'Hipchat', 1),
+ 'HIPCHAT_INCL_PMSLINK': (int, 'Hipchat', 0),
+ 'HIPCHAT_INCL_POSTER': (int, 'Hipchat', 0),
+ 'HIPCHAT_EMOTICON': (str, 'Hipchat', ''),
+ 'HIPCHAT_ENABLED': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_PLAY': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_STOP': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_PAUSE': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_RESUME': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_BUFFER': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_WATCHED': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_CREATED': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_EXTDOWN': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_INTDOWN': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_EXTUP': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_INTUP': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_PMSUPDATE': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_CONCURRENT': (int, 'Hipchat', 0),
+ 'HIPCHAT_ON_NEWDEVICE': (int, 'Hipchat', 0),
'INTERFACE': (str, 'General', 'default'),
'IP_LOGGING_ENABLE': (int, 'General', 0),
'IFTTT_KEY': (str, 'IFTTT', ''),
@@ -213,10 +250,13 @@ _CONFIG_DEFINITIONS = {
'IFTTT_ON_EXTUP': (int, 'IFTTT', 0),
'IFTTT_ON_INTUP': (int, 'IFTTT', 0),
'IFTTT_ON_PMSUPDATE': (int, 'IFTTT', 0),
+ 'IFTTT_ON_CONCURRENT': (int, 'IFTTT', 0),
+ 'IFTTT_ON_NEWDEVICE': (int, 'IFTTT', 0),
'IMGUR_CLIENT_ID': (str, 'Monitoring', ''),
'JOIN_APIKEY': (str, 'Join', ''),
'JOIN_DEVICEID': (str, 'Join', ''),
'JOIN_ENABLED': (int, 'Join', 0),
+ 'JOIN_INCL_SUBJECT': (int, 'Join', 1),
'JOIN_ON_PLAY': (int, 'Join', 0),
'JOIN_ON_STOP': (int, 'Join', 0),
'JOIN_ON_PAUSE': (int, 'Join', 0),
@@ -229,6 +269,8 @@ _CONFIG_DEFINITIONS = {
'JOIN_ON_EXTUP': (int, 'Join', 0),
'JOIN_ON_INTUP': (int, 'Join', 0),
'JOIN_ON_PMSUPDATE': (int, 'Join', 0),
+ 'JOIN_ON_CONCURRENT': (int, 'Join', 0),
+ 'JOIN_ON_NEWDEVICE': (int, 'Join', 0),
'JOURNAL_MODE': (str, 'Advanced', 'wal'),
'LAUNCH_BROWSER': (int, 'General', 1),
'LOG_BLACKLIST': (int, 'General', 1),
@@ -263,11 +305,15 @@ _CONFIG_DEFINITIONS = {
'NMA_ON_EXTUP': (int, 'NMA', 0),
'NMA_ON_INTUP': (int, 'NMA', 0),
'NMA_ON_PMSUPDATE': (int, 'NMA', 0),
+ 'NMA_ON_CONCURRENT': (int, 'NMA', 0),
+ 'NMA_ON_NEWDEVICE': (int, 'NMA', 0),
'NOTIFY_CONSECUTIVE': (int, 'Monitoring', 1),
'NOTIFY_UPLOAD_POSTERS': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED_GRANDPARENT': (int, 'Monitoring', 0),
'NOTIFY_RECENTLY_ADDED_DELAY': (int, 'Monitoring', 60),
+ 'NOTIFY_CONCURRENT_BY_IP': (int, 'Monitoring', 0),
+ 'NOTIFY_CONCURRENT_THRESHOLD': (int, 'Monitoring', 2),
'NOTIFY_WATCHED_PERCENT': (int, 'Monitoring', 85),
'NOTIFY_ON_START_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
'NOTIFY_ON_START_BODY_TEXT': (unicode, 'Monitoring', '{user} ({player}) started playing {title}.'),
@@ -293,6 +339,10 @@ _CONFIG_DEFINITIONS = {
'NOTIFY_ON_INTUP_BODY_TEXT': (unicode, 'Monitoring', 'The Plex Media Server is back up.'),
'NOTIFY_ON_PMSUPDATE_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
'NOTIFY_ON_PMSUPDATE_BODY_TEXT': (unicode, 'Monitoring', 'An update is available for the Plex Media Server (version {update_version}).'),
+ 'NOTIFY_ON_CONCURRENT_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
+ 'NOTIFY_ON_CONCURRENT_BODY_TEXT': (unicode, 'Monitoring', '{user} has {user_streams} concurrent streams.'),
+ 'NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT': (unicode, 'Monitoring', 'PlexPy ({server_name})'),
+ 'NOTIFY_ON_NEWDEVICE_BODY_TEXT': (unicode, 'Monitoring', '{user} is streaming from a new device: {player}.'),
'NOTIFY_SCRIPTS_ARGS_TEXT': (unicode, 'Monitoring', ''),
'OSX_NOTIFY_APP': (str, 'OSX_Notify', '/Applications/PlexPy'),
'OSX_NOTIFY_ENABLED': (int, 'OSX_Notify', 0),
@@ -308,6 +358,8 @@ _CONFIG_DEFINITIONS = {
'OSX_NOTIFY_ON_EXTUP': (int, 'OSX_Notify', 0),
'OSX_NOTIFY_ON_INTUP': (int, 'OSX_Notify', 0),
'OSX_NOTIFY_ON_PMSUPDATE': (int, 'OSX_Notify', 0),
+ 'OSX_NOTIFY_ON_CONCURRENT': (int, 'OSX_Notify', 0),
+ 'OSX_NOTIFY_ON_NEWDEVICE': (int, 'OSX_Notify', 0),
'PLEX_CLIENT_HOST': (str, 'Plex', ''),
'PLEX_ENABLED': (int, 'Plex', 0),
'PLEX_PASSWORD': (str, 'Plex', ''),
@@ -324,6 +376,8 @@ _CONFIG_DEFINITIONS = {
'PLEX_ON_EXTUP': (int, 'Plex', 0),
'PLEX_ON_INTUP': (int, 'Plex', 0),
'PLEX_ON_PMSUPDATE': (int, 'Plex', 0),
+ 'PLEX_ON_CONCURRENT': (int, 'Plex', 0),
+ 'PLEX_ON_NEWDEVICE': (int, 'Plex', 0),
'PROWL_ENABLED': (int, 'Prowl', 0),
'PROWL_KEYS': (str, 'Prowl', ''),
'PROWL_PRIORITY': (int, 'Prowl', 0),
@@ -339,6 +393,8 @@ _CONFIG_DEFINITIONS = {
'PROWL_ON_EXTUP': (int, 'Prowl', 0),
'PROWL_ON_INTUP': (int, 'Prowl', 0),
'PROWL_ON_PMSUPDATE': (int, 'Prowl', 0),
+ 'PROWL_ON_CONCURRENT': (int, 'Prowl', 0),
+ 'PROWL_ON_NEWDEVICE': (int, 'Prowl', 0),
'PUSHALOT_APIKEY': (str, 'Pushalot', ''),
'PUSHALOT_ENABLED': (int, 'Pushalot', 0),
'PUSHALOT_ON_PLAY': (int, 'Pushalot', 0),
@@ -353,6 +409,8 @@ _CONFIG_DEFINITIONS = {
'PUSHALOT_ON_EXTUP': (int, 'Pushalot', 0),
'PUSHALOT_ON_INTUP': (int, 'Pushalot', 0),
'PUSHALOT_ON_PMSUPDATE': (int, 'Pushalot', 0),
+ 'PUSHALOT_ON_CONCURRENT': (int, 'Pushalot', 0),
+ 'PUSHALOT_ON_NEWDEVICE': (int, 'Pushalot', 0),
'PUSHBULLET_APIKEY': (str, 'PushBullet', ''),
'PUSHBULLET_DEVICEID': (str, 'PushBullet', ''),
'PUSHBULLET_CHANNEL_TAG': (str, 'PushBullet', ''),
@@ -369,6 +427,8 @@ _CONFIG_DEFINITIONS = {
'PUSHBULLET_ON_EXTUP': (int, 'PushBullet', 0),
'PUSHBULLET_ON_INTUP': (int, 'PushBullet', 0),
'PUSHBULLET_ON_PMSUPDATE': (int, 'PushBullet', 0),
+ 'PUSHBULLET_ON_CONCURRENT': (int, 'PushBullet', 0),
+ 'PUSHBULLET_ON_NEWDEVICE': (int, 'PushBullet', 0),
'PUSHOVER_APITOKEN': (str, 'Pushover', ''),
'PUSHOVER_ENABLED': (int, 'Pushover', 0),
'PUSHOVER_HTML_SUPPORT': (int, 'Pushover', 1),
@@ -387,6 +447,8 @@ _CONFIG_DEFINITIONS = {
'PUSHOVER_ON_EXTUP': (int, 'Pushover', 0),
'PUSHOVER_ON_INTUP': (int, 'Pushover', 0),
'PUSHOVER_ON_PMSUPDATE': (int, 'Pushover', 0),
+ 'PUSHOVER_ON_CONCURRENT': (int, 'Pushover', 0),
+ 'PUSHOVER_ON_NEWDEVICE': (int, 'Pushover', 0),
'REFRESH_LIBRARIES_INTERVAL': (int, 'Monitoring', 12),
'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1),
'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),
@@ -412,6 +474,8 @@ _CONFIG_DEFINITIONS = {
'SLACK_ON_EXTUP': (int, 'Slack', 0),
'SLACK_ON_INTUP': (int, 'Slack', 0),
'SLACK_ON_PMSUPDATE': (int, 'Slack', 0),
+ 'SLACK_ON_CONCURRENT': (int, 'Slack', 0),
+ 'SLACK_ON_NEWDEVICE': (int, 'Slack', 0),
'SCRIPTS_ENABLED': (int, 'Scripts', 0),
'SCRIPTS_FOLDER': (unicode, 'Scripts', ''),
'SCRIPTS_ON_PLAY': (int, 'Scripts', 0),
@@ -426,6 +490,8 @@ _CONFIG_DEFINITIONS = {
'SCRIPTS_ON_INTDOWN': (int, 'Scripts', 0),
'SCRIPTS_ON_INTUP': (int, 'Scripts', 0),
'SCRIPTS_ON_PMSUPDATE': (int, 'Scripts', 0),
+ 'SCRIPTS_ON_CONCURRENT': (int, 'Scripts', 0),
+ 'SCRIPTS_ON_NEWDEVICE': (int, 'Scripts', 0),
'SCRIPTS_ON_PLAY_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_STOP_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_PAUSE_SCRIPT': (unicode, 'Scripts', ''),
@@ -438,6 +504,8 @@ _CONFIG_DEFINITIONS = {
'SCRIPTS_ON_INTDOWN_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_INTUP_SCRIPT': (unicode, 'Scripts', ''),
'SCRIPTS_ON_PMSUPDATE_SCRIPT': (unicode, 'Scripts', ''),
+ 'SCRIPTS_ON_CONCURRENT_SCRIPT': (unicode, 'Scripts', ''),
+ 'SCRIPTS_ON_NEWDEVICE_SCRIPT': (unicode, 'Scripts', ''),
'TELEGRAM_BOT_TOKEN': (str, 'Telegram', ''),
'TELEGRAM_ENABLED': (int, 'Telegram', 0),
'TELEGRAM_CHAT_ID': (str, 'Telegram', ''),
@@ -456,6 +524,8 @@ _CONFIG_DEFINITIONS = {
'TELEGRAM_ON_EXTUP': (int, 'Telegram', 0),
'TELEGRAM_ON_INTUP': (int, 'Telegram', 0),
'TELEGRAM_ON_PMSUPDATE': (int, 'Telegram', 0),
+ 'TELEGRAM_ON_CONCURRENT': (int, 'Telegram', 0),
+ 'TELEGRAM_ON_NEWDEVICE': (int, 'Telegram', 0),
'TV_LOGGING_ENABLE': (int, 'Monitoring', 1),
'TV_NOTIFY_ENABLE': (int, 'Monitoring', 0),
'TV_NOTIFY_ON_START': (int, 'Monitoring', 1),
@@ -480,6 +550,8 @@ _CONFIG_DEFINITIONS = {
'TWITTER_ON_EXTUP': (int, 'Twitter', 0),
'TWITTER_ON_INTUP': (int, 'Twitter', 0),
'TWITTER_ON_PMSUPDATE': (int, 'Twitter', 0),
+ 'TWITTER_ON_CONCURRENT': (int, 'Twitter', 0),
+ 'TWITTER_ON_NEWDEVICE': (int, 'Twitter', 0),
'UPDATE_DB_INTERVAL': (int, 'General', 24),
'UPDATE_SECTION_IDS': (int, 'General', 1),
'UPDATE_LABELS': (int, 'General', 1),
@@ -500,7 +572,9 @@ _CONFIG_DEFINITIONS = {
'XBMC_ON_INTDOWN': (int, 'XBMC', 0),
'XBMC_ON_EXTUP': (int, 'XBMC', 0),
'XBMC_ON_INTUP': (int, 'XBMC', 0),
- 'XBMC_ON_PMSUPDATE': (int, 'XBMC', 0)
+ 'XBMC_ON_PMSUPDATE': (int, 'XBMC', 0),
+ 'XBMC_ON_CONCURRENT': (int, 'XBMC', 0),
+ 'XBMC_ON_NEWDEVICE': (int, 'XBMC', 0)
}
_BLACKLIST_KEYS = ['_APITOKEN', '_TOKEN', '_KEY', '_SECRET', '_PASSWORD', '_APIKEY', '_ID']
@@ -719,4 +793,8 @@ class Config(object):
home_sections = self.HOME_SECTIONS
home_sections.remove('library_stats')
self.HOME_SECTIONS = home_sections
- self.CONFIG_VERSION = '5'
\ No newline at end of file
+ self.CONFIG_VERSION = '5'
+
+ if self.CONFIG_VERSION == '5':
+ self.MONITOR_PMS_UPDATES = 0
+ self.CONFIG_VERSION = '6'
\ No newline at end of file
diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py
index b90e1b76..6c9a0c7c 100644
--- a/plexpy/datafactory.py
+++ b/plexpy/datafactory.py
@@ -69,8 +69,8 @@ class DataFactory(object):
'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter',
'session_history.user_id',
'session_history.user',
- '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) \
- AS friendly_name',
+ '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
+ THEN users.username ELSE users.friendly_name END) AS friendly_name',
'platform',
'player',
'ip_address',
@@ -1306,4 +1306,19 @@ class DataFactory(object):
return True
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for delete_notification_log: %s." % e)
- return False
\ No newline at end of file
+ return False
+
+ def get_user_devices(self, user_id=''):
+ monitor_db = database.MonitorDatabase()
+
+ if user_id:
+ try:
+ query = 'SELECT machine_id FROM session_history WHERE user_id = ? GROUP BY machine_id'
+ result = monitor_db.select(query=query, args=[user_id])
+ except Exception as e:
+ logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_user_devices: %s." % e)
+ return []
+ else:
+ return []
+
+ return [d['machine_id'] for d in result]
\ No newline at end of file
diff --git a/plexpy/graphs.py b/plexpy/graphs.py
index 0ed17392..3666a471 100644
--- a/plexpy/graphs.py
+++ b/plexpy/graphs.py
@@ -463,7 +463,8 @@ class Graphs(object):
if y_axis == 'plays':
query = 'SELECT ' \
'users.user_id, users.username, ' \
- '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
+ '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
+ ' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \
'SUM(CASE WHEN media_type = "movie" THEN 1 ELSE 0 END) AS movie_count, ' \
'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count, ' \
@@ -479,7 +480,8 @@ class Graphs(object):
else:
query = 'SELECT ' \
'users.user_id, users.username, ' \
- '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
+ '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
+ ' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \
' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \
'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \
@@ -904,7 +906,8 @@ class Graphs(object):
if y_axis == 'plays':
query = 'SELECT ' \
'users.user_id, users.username, ' \
- '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
+ '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
+ ' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
'THEN 1 ELSE 0 END) AS dp_count, ' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "copy" ' \
@@ -925,7 +928,8 @@ class Graphs(object):
else:
query = 'SELECT ' \
'users.user_id, users.username, ' \
- '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
+ '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
+ ' THEN users.username ELSE users.friendly_name END) AS friendly_name,' \
'SUM(CASE WHEN session_history_media_info.transcode_decision = "direct play" ' \
'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \
' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \
diff --git a/plexpy/helpers.py b/plexpy/helpers.py
index 3bf89c23..37b1aa5e 100644
--- a/plexpy/helpers.py
+++ b/plexpy/helpers.py
@@ -16,11 +16,14 @@
import base64
import datetime
from functools import wraps
+import geoip2.database, geoip2.errors
+import gzip
import hashlib
import imghdr
from IPy import IP
import json
import math
+import maxminddb
from operator import itemgetter
import os
import re
@@ -514,6 +517,118 @@ def get_ip(host):
return ip_address
def install_geoip_db():
    """Download, verify, and install the MaxMind GeoLite2-City database.

    Downloads the gzipped database into the cache directory, extracts it
    to the data directory (or the previously configured path), verifies
    the MD5 checksum published by MaxMind, and saves the final path to
    ``plexpy.CONFIG.GEOIP_DB``.

    Returns:
        bool: True on success, False on any failure (already logged).
    """
    maxmind_url = 'http://geolite.maxmind.com/download/geoip/database/'
    geolite2_gz = 'GeoLite2-City.mmdb.gz'
    geolite2_md5 = 'GeoLite2-City.md5'
    geolite2_db = geolite2_gz[:-3]
    md5_checksum = ''

    temp_gz = os.path.join(plexpy.CONFIG.CACHE_DIR, geolite2_gz)
    geolite2_db = plexpy.CONFIG.GEOIP_DB or os.path.join(plexpy.DATA_DIR, geolite2_db)

    # Retrieve the GeoLite2 gzip file
    logger.debug(u"PlexPy Helpers :: Downloading GeoLite2 gzip file from MaxMind...")
    try:
        maxmind = urllib.URLopener()
        maxmind.retrieve(maxmind_url + geolite2_gz, temp_gz)
        # Strip trailing whitespace/newline from the checksum file so the
        # equality comparison below doesn't fail spuriously.
        md5_checksum = urllib2.urlopen(maxmind_url + geolite2_md5).read().strip()
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to download GeoLite2 gzip file from MaxMind: %s" % e)
        return False

    # Extract the GeoLite2 database file
    logger.debug(u"PlexPy Helpers :: Extracting GeoLite2 database...")
    try:
        with gzip.open(temp_gz, 'rb') as gz:
            with open(geolite2_db, 'wb') as db:
                db.write(gz.read())
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to extract the GeoLite2 database: %s" % e)
        return False

    # Delete temporary GeoLite2 gzip file (done before the checksum test so
    # the temp file is not left behind when verification fails).
    logger.debug(u"PlexPy Helpers :: Deleting temporary GeoLite2 gzip file...")
    try:
        os.remove(temp_gz)
    except Exception as e:
        logger.warn(u"PlexPy Helpers :: Failed to remove temporary GeoLite2 gzip file: %s" % e)

    # Check MD5 hash for GeoLite2 database file.
    # NOTE(review): this assumes the published .md5 is the checksum of the
    # extracted .mmdb file -- confirm against MaxMind's download docs.
    logger.debug(u"PlexPy Helpers :: Checking MD5 checksum for GeoLite2 database...")
    try:
        hash_md5 = hashlib.md5()
        with open(geolite2_db, 'rb') as f:
            # Hash in fixed-size chunks to keep memory bounded.
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        md5_hash = hash_md5.hexdigest()

        if md5_hash != md5_checksum:
            logger.error(u"PlexPy Helpers :: MD5 checksum doesn't match for GeoLite2 database. "
                         "Checksum: %s, file hash: %s" % (md5_checksum, md5_hash))
            return False
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to generate MD5 checksum for GeoLite2 database: %s" % e)
        return False

    logger.debug(u"PlexPy Helpers :: GeoLite2 database installed successfully.")
    plexpy.CONFIG.__setattr__('GEOIP_DB', geolite2_db)
    plexpy.CONFIG.write()

    return True
+
def uninstall_geoip_db():
    """Delete the installed GeoLite2 database and clear its configured path.

    Returns:
        bool: True on success, False if removal or config update failed.
    """
    logger.debug(u"PlexPy Helpers :: Uninstalling the GeoLite2 database...")
    try:
        os.remove(plexpy.CONFIG.GEOIP_DB)
        plexpy.CONFIG.__setattr__('GEOIP_DB', '')
        plexpy.CONFIG.write()
    except Exception as e:
        logger.error(u"PlexPy Helpers :: Failed to uninstall the GeoLite2 database: %s" % e)
        return False
    else:
        logger.debug(u"PlexPy Helpers :: GeoLite2 database uninstalled successfully.")
        return True
+
def geoip_lookup(ip_address):
    """Look up geolocation details for an IP address in the GeoLite2 database.

    Args:
        ip_address: The IP address string to look up.

    Returns:
        dict: Geolocation fields (continent, country, region, city,
            postal_code, timezone, latitude, longitude, accuracy) on
            success, or a human-readable error string on failure.
    """
    if not plexpy.CONFIG.GEOIP_DB:
        return 'GeoLite2 database not installed. Please install from the ' \
            'Settings page.'

    if not ip_address:
        return 'No IP address provided.'

    reader = None
    try:
        reader = geoip2.database.Reader(plexpy.CONFIG.GEOIP_DB)
        geo = reader.city(ip_address)
    except IOError as e:
        return 'Missing GeoLite2 database. Please reinstall from the ' \
            'Settings page.'
    except ValueError as e:
        return 'Unable to read GeoLite2 database. Please reinstall from the ' \
            'Settings page.'
    except maxminddb.InvalidDatabaseError as e:
        return 'Invalid GeoLite2 database. Please reinstall from the ' \
            'Settings page.'
    except geoip2.errors.AddressNotFoundError as e:
        return '%s' % e
    except Exception as e:
        return 'Error: %s' % e
    finally:
        # Always release the database handle, including on exception paths
        # (the original leaked the reader when e.g. reader.city() raised
        # AddressNotFoundError).
        if reader is not None:
            reader.close()

    geo_info = {'continent': geo.continent.name,
                'country': geo.country.name,
                'region': geo.subdivisions.most_specific.name,
                'city': geo.city.name,
                'postal_code': geo.postal.code,
                'timezone': geo.location.time_zone,
                'latitude': geo.location.latitude,
                'longitude': geo.location.longitude,
                'accuracy': geo.location.accuracy_radius
                }

    return geo_info
+
# Taken from SickRage
def anon_url(*url):
"""
diff --git a/plexpy/libraries.py b/plexpy/libraries.py
index c942b768..649d454a 100644
--- a/plexpy/libraries.py
+++ b/plexpy/libraries.py
@@ -753,8 +753,9 @@ class Libraries(object):
try:
if str(section_id).isdigit():
- query = 'SELECT (CASE WHEN users.friendly_name IS NULL THEN users.username ' \
- 'ELSE users.friendly_name END) AS friendly_name, users.user_id, users.thumb, COUNT(user) AS user_count ' \
+ query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \
+ 'THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \
+ 'users.user_id, users.thumb, COUNT(user) AS user_count ' \
'FROM session_history ' \
'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
'JOIN users ON users.user_id = session_history.user_id ' \
diff --git a/plexpy/logger.py b/plexpy/logger.py
index b43e06d3..17afa084 100644
--- a/plexpy/logger.py
+++ b/plexpy/logger.py
@@ -90,14 +90,14 @@ class PublicIPFilter(logging.Filter):
try:
# Currently only checking for ipv4 addresses
- ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', record.msg)
+ ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})', record.msg)
for ip in ipv4:
if helpers.is_ip_public(ip):
record.msg = record.msg.replace(ip, ip.partition('.')[0] + '.***.***.***')
args = []
for arg in record.args:
- ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}', arg) if isinstance(arg, basestring) else []
+ ipv4 = re.findall(r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})', arg) if isinstance(arg, basestring) else []
for ip in ipv4:
if helpers.is_ip_public(ip):
arg = arg.replace(ip, ip.partition('.')[0] + '.***.***.***')
diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py
index b64afab5..7d86b4df 100644
--- a/plexpy/notification_handler.py
+++ b/plexpy/notification_handler.py
@@ -182,6 +182,46 @@ def notify(stream_data=None, notify_action=None):
notify_strings=notify_strings,
metadata=metadata)
+ elif agent['on_concurrent'] and notify_action == 'concurrent':
+ # Build and send notification
+ notify_strings, metadata = build_notify_text(session=stream_data,
+ notify_action=notify_action,
+ agent_id=agent['id'])
+
+ notifiers.send_notification(agent_id=agent['id'],
+ subject=notify_strings[0],
+ body=notify_strings[1],
+ script_args=notify_strings[2],
+ notify_action=notify_action,
+ metadata=metadata)
+
+ # Set the notification state in the db
+ set_notify_state(session=stream_data,
+ notify_action=notify_action,
+ agent_info=agent,
+ notify_strings=notify_strings,
+ metadata=metadata)
+
+ elif agent['on_newdevice'] and notify_action == 'newdevice':
+ # Build and send notification
+ notify_strings, metadata = build_notify_text(session=stream_data,
+ notify_action=notify_action,
+ agent_id=agent['id'])
+
+ notifiers.send_notification(agent_id=agent['id'],
+ subject=notify_strings[0],
+ body=notify_strings[1],
+ script_args=notify_strings[2],
+ notify_action=notify_action,
+ metadata=metadata)
+
+ # Set the notification state in the db
+ set_notify_state(session=stream_data,
+ notify_action=notify_action,
+ agent_info=agent,
+ notify_strings=notify_strings,
+ metadata=metadata)
+
elif (stream_data['media_type'] == 'track' and plexpy.CONFIG.MUSIC_NOTIFY_ENABLE):
for agent in notifiers.available_notification_agents():
@@ -285,6 +325,46 @@ def notify(stream_data=None, notify_action=None):
notify_strings=notify_strings,
metadata=metadata)
+ elif agent['on_concurrent'] and notify_action == 'concurrent':
+ # Build and send notification
+ notify_strings, metadata = build_notify_text(session=stream_data,
+ notify_action=notify_action,
+ agent_id=agent['id'])
+
+ notifiers.send_notification(agent_id=agent['id'],
+ subject=notify_strings[0],
+ body=notify_strings[1],
+ script_args=notify_strings[2],
+ notify_action=notify_action,
+ metadata=metadata)
+
+ # Set the notification state in the db
+ set_notify_state(session=stream_data,
+ notify_action=notify_action,
+ agent_info=agent,
+ notify_strings=notify_strings,
+ metadata=metadata)
+
+ elif agent['on_newdevice'] and notify_action == 'newdevice':
+ # Build and send notification
+ notify_strings, metadata = build_notify_text(session=stream_data,
+ notify_action=notify_action,
+ agent_id=agent['id'])
+
+ notifiers.send_notification(agent_id=agent['id'],
+ subject=notify_strings[0],
+ body=notify_strings[1],
+ script_args=notify_strings[2],
+ notify_action=notify_action,
+ metadata=metadata)
+
+ # Set the notification state in the db
+ set_notify_state(session=stream_data,
+ notify_action=notify_action,
+ agent_info=agent,
+ notify_strings=notify_strings,
+ metadata=metadata)
+
elif stream_data['media_type'] == 'clip':
pass
else:
@@ -485,7 +565,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
pms_connect = pmsconnect.PmsConnect()
metadata_list = pms_connect.get_metadata_details(rating_key=rating_key)
- stream_count = pms_connect.get_current_activity().get('stream_count', '')
+ current_activity = pms_connect.get_current_activity()
+ sessions = current_activity.get('sessions', [])
+ stream_count = current_activity.get('stream_count', '')
+ user_stream_count = sum(1 for d in sessions if d['user_id'] == session['user_id']) if session else ''
if metadata_list:
metadata = metadata_list['metadata']
@@ -525,6 +608,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
on_watched_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT), agent_id)
on_created_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CREATED_SUBJECT_TEXT), agent_id)
on_created_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CREATED_BODY_TEXT), agent_id)
+ on_concurrent_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT), agent_id)
+ on_concurrent_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT), agent_id)
+ on_newdevice_subject = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT), agent_id)
+ on_newdevice_body = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT), agent_id)
script_args_text = strip_tag(re.sub(pattern, '', plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT), agent_id)
else:
on_start_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT, agent_id)
@@ -541,6 +628,10 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
on_watched_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT, agent_id)
on_created_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_CREATED_SUBJECT_TEXT, agent_id)
on_created_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_CREATED_BODY_TEXT, agent_id)
+ on_concurrent_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT, agent_id)
+ on_concurrent_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT, agent_id)
+ on_newdevice_subject = strip_tag(plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT, agent_id)
+ on_newdevice_body = strip_tag(plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT, agent_id)
script_args_text = strip_tag(plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT, agent_id)
# Create a title
@@ -624,7 +715,7 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
else:
thumb = None
- if thumb:
+ if plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS and thumb:
# Try to retrieve a poster_url from the database
data_factory = datafactory.DataFactory()
poster_url = data_factory.get_poster_url(rating_key=poster_key)
@@ -676,6 +767,7 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
'timestamp': arrow.now().format(time_format),
# Stream parameters
'streams': stream_count,
+ 'user_streams': user_stream_count,
'user': session.get('friendly_name',''),
'username': session.get('user',''),
'platform': session.get('platform',''),
@@ -940,6 +1032,52 @@ def build_notify_text(session=None, timeline=None, notify_action=None, agent_id=
except:
logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
+ return [subject_text, body_text, script_args], metadata
+ else:
+ return [subject_text, body_text, script_args], metadata
+ elif notify_action == 'concurrent':
+ # Default body text
+ body_text = '%s has %s concurrent streams.' % (session['friendly_name'],
+ user_stream_count)
+
+ if on_concurrent_subject and on_concurrent_body:
+ try:
+ subject_text = unicode(on_concurrent_subject).format(**available_params)
+ except LookupError as e:
+ logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e)
+ except:
+ logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.")
+
+ try:
+ body_text = unicode(on_concurrent_body).format(**available_params)
+ except LookupError as e:
+ logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e)
+ except:
+ logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
+
+ return [subject_text, body_text, script_args], metadata
+ else:
+ return [subject_text, body_text, script_args], metadata
+ elif notify_action == 'newdevice':
+ # Default body text
+ body_text = '%s is streaming from a new device: %s.' % (session['friendly_name'],
+ session['player'])
+
+ if on_newdevice_subject and on_newdevice_body:
+ try:
+ subject_text = unicode(on_newdevice_subject).format(**available_params)
+ except LookupError as e:
+ logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e)
+ except:
+ logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.")
+
+ try:
+ body_text = unicode(on_newdevice_body).format(**available_params)
+ except LookupError as e:
+ logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e)
+ except:
+ logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.")
+
return [subject_text, body_text, script_args], metadata
else:
return [subject_text, body_text, script_args], metadata
@@ -961,8 +1099,7 @@ def build_server_notify_text(notify_action=None, agent_id=None):
update_status = {}
if notify_action == 'pmsupdate':
- pms_connect = pmsconnect.PmsConnect()
- update_status = pms_connect.get_update_staus()
+ update_status = plex_tv.get_plex_downloads()
if server_times:
updated_at = server_times['updated_at']
@@ -995,7 +1132,16 @@ def build_server_notify_text(notify_action=None, agent_id=None):
# Update parameters
'update_version': update_status.get('version',''),
'update_url': update_status.get('download_url',''),
- 'update_changelog': update_status.get('changelog','')}
+ 'update_release_date': arrow.get(update_status.get('release_date','')).format(date_format)
+ if update_status.get('release_date','') else '',
+ 'update_channel': 'Plex Pass' if plexpy.CONFIG.PMS_UPDATE_CHANNEL == 'plexpass' else 'Public',
+ 'update_platform': update_status.get('platform',''),
+ 'update_distro': update_status.get('distro',''),
+ 'update_distro_build': update_status.get('build',''),
+ 'update_requirements': update_status.get('requirements',''),
+ 'update_extra_info': update_status.get('extra_info',''),
+ 'update_changelog_added': update_status.get('changelog_added',''),
+ 'update_changelog_fixed': update_status.get('changelog_fixed','')}
# Default text
subject_text = 'PlexPy (%s)' % server_name
@@ -1146,10 +1292,10 @@ def strip_tag(data, agent_id=None):
elif agent_id == 13:
# Allow tags b, i, code, pre, a[href] for Telegram
whitelist = {'b': [],
- 'i': [],
- 'code': [],
- 'pre': [],
- 'a': ['href']}
+ 'i': [],
+ 'code': [],
+ 'pre': [],
+ 'a': ['href']}
return bleach.clean(data, tags=whitelist.keys(), attributes=whitelist, strip=True)
else:
diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py
index 983b34a2..c480de4e 100644
--- a/plexpy/notifiers.py
+++ b/plexpy/notifiers.py
@@ -31,6 +31,7 @@ import urllib
from urllib import urlencode
import urllib2
from urlparse import urlparse
+import uuid
import gntp.notifier
import facebook
@@ -62,7 +63,8 @@ AGENT_IDS = {"Growl": 0,
"Scripts": 15,
"Facebook": 16,
"Browser": 17,
- "Join": 18}
+ "Join": 18,
+ "Hipchat": 19}
def available_notification_agents():
@@ -82,7 +84,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.GROWL_ON_INTDOWN,
'on_extup': plexpy.CONFIG.GROWL_ON_EXTUP,
'on_intup': plexpy.CONFIG.GROWL_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.GROWL_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.GROWL_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.GROWL_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.GROWL_ON_NEWDEVICE
},
{'name': 'Prowl',
'id': AGENT_IDS['Prowl'],
@@ -100,7 +104,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PROWL_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PROWL_ON_EXTUP,
'on_intup': plexpy.CONFIG.PROWL_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.PROWL_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.PROWL_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.PROWL_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.PROWL_ON_NEWDEVICE
},
{'name': 'XBMC',
'id': AGENT_IDS['XBMC'],
@@ -118,7 +124,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.XBMC_ON_INTDOWN,
'on_extup': plexpy.CONFIG.XBMC_ON_EXTUP,
'on_intup': plexpy.CONFIG.XBMC_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.XBMC_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.XBMC_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.XBMC_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.XBMC_ON_NEWDEVICE
},
{'name': 'Plex Home Theater',
'id': AGENT_IDS['Plex'],
@@ -136,7 +144,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PLEX_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PLEX_ON_EXTUP,
'on_intup': plexpy.CONFIG.PLEX_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.PLEX_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.PLEX_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.PLEX_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.PLEX_ON_NEWDEVICE
},
{'name': 'NotifyMyAndroid',
'id': AGENT_IDS['NMA'],
@@ -154,7 +164,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.NMA_ON_INTDOWN,
'on_extup': plexpy.CONFIG.NMA_ON_EXTUP,
'on_intup': plexpy.CONFIG.NMA_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.NMA_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.NMA_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.NMA_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.NMA_ON_NEWDEVICE
},
{'name': 'Pushalot',
'id': AGENT_IDS['Pushalot'],
@@ -172,7 +184,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PUSHALOT_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PUSHALOT_ON_EXTUP,
'on_intup': plexpy.CONFIG.PUSHALOT_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.PUSHALOT_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.PUSHALOT_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.PUSHALOT_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.PUSHALOT_ON_NEWDEVICE
},
{'name': 'Pushbullet',
'id': AGENT_IDS['Pushbullet'],
@@ -190,7 +204,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PUSHBULLET_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PUSHBULLET_ON_EXTUP,
'on_intup': plexpy.CONFIG.PUSHBULLET_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.PUSHBULLET_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.PUSHBULLET_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.PUSHBULLET_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.PUSHBULLET_ON_NEWDEVICE
},
{'name': 'Pushover',
'id': AGENT_IDS['Pushover'],
@@ -208,7 +224,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.PUSHOVER_ON_INTDOWN,
'on_extup': plexpy.CONFIG.PUSHOVER_ON_EXTUP,
'on_intup': plexpy.CONFIG.PUSHOVER_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.PUSHOVER_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.PUSHOVER_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.PUSHOVER_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.PUSHOVER_ON_NEWDEVICE
},
{'name': 'Boxcar2',
'id': AGENT_IDS['Boxcar2'],
@@ -226,7 +244,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.BOXCAR_ON_INTDOWN,
'on_extup': plexpy.CONFIG.BOXCAR_ON_EXTUP,
'on_intup': plexpy.CONFIG.BOXCAR_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.BOXCAR_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.BOXCAR_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.BOXCAR_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.BOXCAR_ON_NEWDEVICE
},
{'name': 'E-mail',
'id': AGENT_IDS['Email'],
@@ -244,7 +264,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.EMAIL_ON_INTDOWN,
'on_extup': plexpy.CONFIG.EMAIL_ON_EXTUP,
'on_intup': plexpy.CONFIG.EMAIL_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.EMAIL_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.EMAIL_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.EMAIL_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.EMAIL_ON_NEWDEVICE
},
{'name': 'Twitter',
'id': AGENT_IDS['Twitter'],
@@ -262,7 +284,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.TWITTER_ON_INTDOWN,
'on_extup': plexpy.CONFIG.TWITTER_ON_EXTUP,
'on_intup': plexpy.CONFIG.TWITTER_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.TWITTER_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.TWITTER_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.TWITTER_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.TWITTER_ON_NEWDEVICE
},
{'name': 'IFTTT',
'id': AGENT_IDS['IFTTT'],
@@ -280,7 +304,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.IFTTT_ON_INTDOWN,
'on_extup': plexpy.CONFIG.IFTTT_ON_EXTUP,
'on_intup': plexpy.CONFIG.IFTTT_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.IFTTT_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.IFTTT_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.IFTTT_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.IFTTT_ON_NEWDEVICE
},
{'name': 'Telegram',
'id': AGENT_IDS['Telegram'],
@@ -298,7 +324,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.TELEGRAM_ON_INTDOWN,
'on_extup': plexpy.CONFIG.TELEGRAM_ON_EXTUP,
'on_intup': plexpy.CONFIG.TELEGRAM_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.TELEGRAM_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.TELEGRAM_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.TELEGRAM_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.TELEGRAM_ON_NEWDEVICE
},
{'name': 'Slack',
'id': AGENT_IDS['Slack'],
@@ -316,7 +344,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.SLACK_ON_INTDOWN,
'on_extup': plexpy.CONFIG.SLACK_ON_EXTUP,
'on_intup': plexpy.CONFIG.SLACK_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.SLACK_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.SLACK_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.SLACK_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.SLACK_ON_NEWDEVICE
},
{'name': 'Scripts',
'id': AGENT_IDS['Scripts'],
@@ -334,7 +364,9 @@ def available_notification_agents():
'on_extup': plexpy.CONFIG.SCRIPTS_ON_EXTUP,
'on_intdown': plexpy.CONFIG.SCRIPTS_ON_INTDOWN,
'on_intup': plexpy.CONFIG.SCRIPTS_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.SCRIPTS_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE
},
{'name': 'Facebook',
'id': AGENT_IDS['Facebook'],
@@ -352,7 +384,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.FACEBOOK_ON_INTDOWN,
'on_extup': plexpy.CONFIG.FACEBOOK_ON_EXTUP,
'on_intup': plexpy.CONFIG.FACEBOOK_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.FACEBOOK_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.FACEBOOK_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.FACEBOOK_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.FACEBOOK_ON_NEWDEVICE
},
{'name': 'Browser',
'id': AGENT_IDS['Browser'],
@@ -370,7 +404,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.BROWSER_ON_INTDOWN,
'on_extup': plexpy.CONFIG.BROWSER_ON_EXTUP,
'on_intup': plexpy.CONFIG.BROWSER_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.BROWSER_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.BROWSER_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.BROWSER_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.BROWSER_ON_NEWDEVICE
},
{'name': 'Join',
'id': AGENT_IDS['Join'],
@@ -388,7 +424,29 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.JOIN_ON_INTDOWN,
'on_extup': plexpy.CONFIG.JOIN_ON_EXTUP,
'on_intup': plexpy.CONFIG.JOIN_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.JOIN_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.JOIN_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.JOIN_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.JOIN_ON_NEWDEVICE
+ },
+ {'name': 'Hipchat',
+ 'id': AGENT_IDS['Hipchat'],
+ 'config_prefix': 'hipchat',
+ 'has_config': True,
+ 'state': checked(plexpy.CONFIG.HIPCHAT_ENABLED),
+ 'on_play': plexpy.CONFIG.HIPCHAT_ON_PLAY,
+ 'on_stop': plexpy.CONFIG.HIPCHAT_ON_STOP,
+ 'on_pause': plexpy.CONFIG.HIPCHAT_ON_PAUSE,
+ 'on_resume': plexpy.CONFIG.HIPCHAT_ON_RESUME,
+ 'on_buffer': plexpy.CONFIG.HIPCHAT_ON_BUFFER,
+ 'on_watched': plexpy.CONFIG.HIPCHAT_ON_WATCHED,
+ 'on_created': plexpy.CONFIG.HIPCHAT_ON_CREATED,
+ 'on_extdown': plexpy.CONFIG.HIPCHAT_ON_EXTDOWN,
+ 'on_intdown': plexpy.CONFIG.HIPCHAT_ON_INTDOWN,
+ 'on_extup': plexpy.CONFIG.HIPCHAT_ON_EXTUP,
+ 'on_intup': plexpy.CONFIG.HIPCHAT_ON_INTUP,
+ 'on_pmsupdate': plexpy.CONFIG.HIPCHAT_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.HIPCHAT_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.HIPCHAT_ON_NEWDEVICE
}
]
@@ -411,7 +469,9 @@ def available_notification_agents():
'on_intdown': plexpy.CONFIG.OSX_NOTIFY_ON_INTDOWN,
'on_extup': plexpy.CONFIG.OSX_NOTIFY_ON_EXTUP,
'on_intup': plexpy.CONFIG.OSX_NOTIFY_ON_INTUP,
- 'on_pmsupdate': plexpy.CONFIG.OSX_NOTIFY_ON_PMSUPDATE
+ 'on_pmsupdate': plexpy.CONFIG.OSX_NOTIFY_ON_PMSUPDATE,
+ 'on_concurrent': plexpy.CONFIG.OSX_NOTIFY_ON_CONCURRENT,
+ 'on_newdevice': plexpy.CONFIG.OSX_NOTIFY_ON_NEWDEVICE
})
return agents
@@ -478,6 +538,9 @@ def get_notification_agent_config(agent_id):
elif agent_id == 18:
join = JOIN()
return join.return_config_options()
+ elif agent_id == 19:
+ hipchat = HIPCHAT()
+ return hipchat.return_config_options()
else:
return []
else:
@@ -545,11 +608,80 @@ def send_notification(agent_id, subject, body, notify_action, **kwargs):
elif agent_id == 18:
join = JOIN()
return join.notify(message=body, subject=subject)
+ elif agent_id == 19:
+ hipchat = HIPCHAT()
+ return hipchat.notify(message=body, subject=subject, **kwargs)
else:
logger.debug(u"PlexPy Notifiers :: Unknown agent id received.")
else:
logger.debug(u"PlexPy Notifiers :: Notification requested but no agent id received.")
class PrettyMetadata(object):
    """Formats a PlexPy metadata dict into display strings for notifiers.

    Each getter caches its result on the instance and returns it, matching
    the attribute side effects callers may rely on.
    """

    def __init__(self, metadata):
        # Raw metadata dict for the item being notified about.
        self.metadata = metadata
        self.media_type = metadata['media_type']

    def get_poster_url(self):
        """Return the item's poster URL, falling back to bundled defaults."""
        url = self.metadata.get('poster_url', '')
        if not url:
            base = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/'
            # Music items get the generic cover art; everything else a poster.
            if self.metadata['media_type'] in ['artist', 'track']:
                url = base + 'cover.png'
            else:
                url = base + 'poster.png'
        self.poster_url = url
        return self.poster_url

    def get_poster_link(self):
        """Return the first available external metadata link, or ''."""
        self.poster_link = ''
        # Priority order: TheTVDB, The Movie Database, IMDB, Last.fm.
        for key in ('thetvdb_url', 'themoviedb_url', 'imdb_url', 'lastfm_url'):
            if self.metadata.get(key, ''):
                self.poster_link = self.metadata.get(key, '')
                break
        return self.poster_link

    def get_caption(self):
        """Return a caption matching the link chosen by get_poster_link."""
        self.caption = ''
        for key, text in (('thetvdb_url', 'View on TheTVDB'),
                          ('themoviedb_url', 'View on The Movie Database'),
                          ('imdb_url', 'View on IMDB'),
                          ('lastfm_url', 'View on Last.fm')):
            if self.metadata.get(key, ''):
                self.caption = text
                break
        return self.caption

    def get_title(self, divider = '-'):
        """Return a UTF-8 encoded title formatted per media type."""
        md = self.metadata
        self.title = None
        if self.media_type in ('movie', 'show'):
            self.title = '%s (%s)' % (md['title'], md['year'])
        elif self.media_type == 'artist':
            self.title = md['title']
        elif self.media_type == 'track':
            self.title = '%s - %s' % (md['grandparent_title'], md['title'])
        elif self.media_type == 'episode':
            self.title = '%s - %s (S%s %s E%s)' % (md['grandparent_title'],
                                                   md['title'],
                                                   md['parent_media_index'],
                                                   divider,
                                                   md['media_index'])
        return self.title.encode("utf-8")

    def get_subtitle(self):
        """Return a UTF-8 encoded subtitle (album for tracks, else summary)."""
        if self.media_type == 'track':
            self.subtitle = self.metadata['parent_title']
        else:
            self.subtitle = self.metadata['summary']
        return self.subtitle.encode("utf-8")

    def get_plex_url(self):
        """Return the Plex Web URL stored in the metadata."""
        self.plex_url = self.metadata['plex_url']
        return self.plex_url
class GROWL(object):
"""
@@ -1264,7 +1396,7 @@ class TwitterNotifier(object):
poster_url = metadata.get('poster_url','')
if self.incl_subject:
- self._send_tweet(subject + ': ' + message, attachment=poster_url)
+ self._send_tweet(subject + '\r\n' + message, attachment=poster_url)
else:
self._send_tweet(message, attachment=poster_url)
@@ -1721,18 +1853,21 @@ class TELEGRAM(object):
data = {'chat_id': self.chat_id}
if self.incl_subject:
- text = event.encode('utf-8') + ': ' + message.encode('utf-8')
+ text = event.encode('utf-8') + '\r\n' + message.encode('utf-8')
else:
text = message.encode('utf-8')
if self.incl_poster and 'metadata' in kwargs:
+ poster_data = {'chat_id': self.chat_id,
+ 'disable_notification': True}
+
metadata = kwargs['metadata']
poster_url = metadata.get('poster_url','')
if poster_url:
files = {'photo': (poster_url, urllib.urlopen(poster_url).read())}
response = requests.post('https://api.telegram.org/bot%s/%s' % (self.bot_token, 'sendPhoto'),
- data=data,
+ data=poster_data,
files=files)
request_status = response.status_code
request_content = json.loads(response.text)
@@ -1840,7 +1975,7 @@ class SLACK(object):
return
if self.incl_subject:
- text = event.encode('utf-8') + ': ' + message.encode("utf-8")
+ text = event.encode('utf-8') + '\r\n' + message.encode("utf-8")
else:
text = message.encode("utf-8")
@@ -1854,81 +1989,42 @@ class SLACK(object):
data['icon_url'] = self.icon_emoji
if self.incl_poster and 'metadata' in kwargs:
- attachment = {}
- metadata = kwargs['metadata']
- poster_url = metadata.get('poster_url','')
- poster_link = ''
- caption = ''
+ # Grab formatted metadata
+ pretty_metadata = PrettyMetadata(kwargs['metadata'])
+ poster_url = pretty_metadata.get_poster_url()
+ plex_url = pretty_metadata.get_plex_url()
+ poster_link = pretty_metadata.get_poster_link()
+ caption = pretty_metadata.get_caption()
+ title = pretty_metadata.get_title()
+ subtitle = pretty_metadata.get_subtitle()
- # Use default posters if no poster_url
- if not poster_url:
- if metadata['media_type'] in ['artist', 'track']:
- poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/cover.png'
- else:
- poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
+ # Build Slack post attachment
+ attachment = {'fallback': 'Image for %s' % title,
+ 'title': title,
+ 'text': subtitle,
+ 'image_url': poster_url,
+ 'thumb_url': poster_url
+ }
- if metadata['media_type'] == 'movie':
- title = '%s (%s)' % (metadata['title'], metadata['year'])
- if metadata.get('imdb_url',''):
- poster_link = metadata.get('imdb_url', '')
- caption = 'View on IMDB'
- elif metadata.get('themoviedb_url',''):
- poster_link = metadata.get('themoviedb_url', '')
- caption = 'View on The Movie Database'
-
- elif metadata['media_type'] == 'show':
- title = '%s (%s)' % (metadata['title'], metadata['year'])
- if metadata.get('thetvdb_url',''):
- poster_link = metadata.get('thetvdb_url', '')
- caption = 'View on TheTVDB'
- elif metadata.get('themoviedb_url',''):
- poster_link = metadata.get('themoviedb_url', '')
- caption = 'View on The Movie Database'
-
- elif metadata['media_type'] == 'episode':
- title = '%s - %s (S%s - E%s)' % (metadata['grandparent_title'],
- metadata['title'],
- metadata['parent_media_index'],
- metadata['media_index'])
- if metadata.get('thetvdb_url',''):
- poster_link = metadata.get('thetvdb_url', '')
- caption = 'View on TheTVDB'
- elif metadata.get('themoviedb_url',''):
- poster_link = metadata.get('themoviedb_url', '')
- caption = 'View on The Movie Database'
-
- elif metadata['media_type'] == 'artist':
- title = metadata['title']
- if metadata.get('lastfm_url',''):
- poster_link = metadata.get('lastfm_url', '')
- caption = 'View on Last.fm'
-
- elif metadata['media_type'] == 'track':
- title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
- if metadata.get('lastfm_url',''):
- poster_link = metadata.get('lastfm_url', '')
- caption = 'View on Last.fm'
-
- # Build Facebook post attachment
- if self.incl_pmslink:
- caption = 'View on Plex Web'
- attachment['title_link'] = metadata['plex_url']
- attachment['text'] = caption
- elif poster_link:
+ fields = []
+ if poster_link:
attachment['title_link'] = poster_link
- attachment['text'] = caption
-
- attachment['fallback'] = 'Image for %s' % title
- attachment['title'] = title
- attachment['image_url'] = poster_url
+ fields.append({'value': '<%s|%s>' % (poster_link, caption),
+ 'short': True})
+ if self.incl_pmslink:
+ fields.append({'value': '<%s|%s>' % (plex_url, 'View on Plex Web'),
+ 'short': True})
+ if fields:
+ attachment['fields'] = fields
data['attachments'] = [attachment]
- url = urlparse(self.slack_hook).path
+ slackhost = urlparse(self.slack_hook).hostname
+ slackpath = urlparse(self.slack_hook).path
- http_handler = HTTPSConnection("hooks.slack.com")
+ http_handler = HTTPSConnection(slackhost)
http_handler.request("POST",
- url,
+ slackpath,
headers={'Content-type': "application/x-www-form-urlencoded"},
body=json.dumps(data))
@@ -2090,6 +2186,12 @@ class Scripts(object):
elif notify_action == 'pmsupdate':
script = plexpy.CONFIG.SCRIPTS_ON_PMSUPDATE_SCRIPT
+ elif notify_action == 'concurrent':
+ script = plexpy.CONFIG.SCRIPTS_ON_CONCURRENT_SCRIPT
+
+ elif notify_action == 'newdevice':
+ script = plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE_SCRIPT
+
else:
# For manual scripts
script = kwargs.get('script', '')
@@ -2266,6 +2368,20 @@ class Scripts(object):
'description': 'Choose the script for Plex update available.',
'input_type': 'select',
'select_options': self.list_scripts()
+ },
+ {'label': 'User Concurrent Streams',
+ 'value': plexpy.CONFIG.SCRIPTS_ON_CONCURRENT_SCRIPT,
+ 'name': 'scripts_on_concurrent_script',
+ 'description': 'Choose the script for user concurrent streams.',
+ 'input_type': 'select',
+ 'select_options': self.list_scripts()
+ },
+ {'label': 'User New Device',
+ 'value': plexpy.CONFIG.SCRIPTS_ON_NEWDEVICE_SCRIPT,
+ 'name': 'scripts_on_newdevice_script',
+ 'description': 'Choose the script for user new device.',
+ 'input_type': 'select',
+ 'select_options': self.list_scripts()
}
]
@@ -2291,71 +2407,19 @@ class FacebookNotifier(object):
attachment = {}
if self.incl_poster and 'metadata' in kwargs:
- metadata = kwargs['metadata']
- poster_url = metadata.get('poster_url','')
- poster_link = ''
- caption = ''
-
- # Use default posters if no poster_url
- if not poster_url:
- if metadata['media_type'] in ['artist', 'track']:
- poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/cover.png'
- else:
- poster_url = 'https://raw.githubusercontent.com/drzoidberg33/plexpy/master/data/interfaces/default/images/poster.png'
-
- if metadata['media_type'] == 'movie':
- title = '%s (%s)' % (metadata['title'], metadata['year'])
- subtitle = metadata['summary']
- if metadata.get('imdb_url',''):
- poster_link = metadata.get('imdb_url', '')
- caption = 'View on IMDB'
- elif metadata.get('themoviedb_url',''):
- poster_link = metadata.get('themoviedb_url', '')
- caption = 'View on The Movie Database'
-
- elif metadata['media_type'] == 'show':
- title = '%s (%s)' % (metadata['title'], metadata['year'])
- subtitle = metadata['summary']
- if metadata.get('thetvdb_url',''):
- poster_link = metadata.get('thetvdb_url', '')
- caption = 'View on TheTVDB'
- elif metadata.get('themoviedb_url',''):
- poster_link = metadata.get('themoviedb_url', '')
- caption = 'View on The Movie Database'
-
- elif metadata['media_type'] == 'episode':
- title = '%s - %s (S%s %s E%s)' % (metadata['grandparent_title'],
- metadata['title'],
- metadata['parent_media_index'],
- '\xc2\xb7'.decode('utf8'),
- metadata['media_index'])
- subtitle = metadata['summary']
- if metadata.get('thetvdb_url',''):
- poster_link = metadata.get('thetvdb_url', '')
- caption = 'View on TheTVDB'
- elif metadata.get('themoviedb_url',''):
- poster_link = metadata.get('themoviedb_url', '')
- caption = 'View on The Movie Database'
-
- elif metadata['media_type'] == 'artist':
- title = metadata['title']
- subtitle = metadata['summary']
- if metadata.get('lastfm_url',''):
- poster_link = metadata.get('lastfm_url', '')
- caption = 'View on Last.fm'
-
- elif metadata['media_type'] == 'track':
- title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
- subtitle = metadata['parent_title']
- if metadata.get('lastfm_url',''):
- poster_link = metadata.get('lastfm_url', '')
- caption = 'View on Last.fm'
+ # Grab formatted metadata
+ pretty_metadata = PrettyMetadata(kwargs['metadata'])
+ poster_url = pretty_metadata.get_poster_url()
+ plex_url = pretty_metadata.get_plex_url()
+ poster_link = pretty_metadata.get_poster_link()
+ caption = pretty_metadata.get_caption()
+ title = pretty_metadata.get_title('\xc2\xb7'.decode('utf8'))
+ subtitle = pretty_metadata.get_subtitle()
# Build Facebook post attachment
if self.incl_pmslink:
- caption = 'View on Plex Web'
- attachment['link'] = metadata['plex_url']
- attachment['caption'] = caption
+ attachment['link'] = plex_url
+ attachment['caption'] = 'View on Plex Web'
elif poster_link:
attachment['link'] = poster_link
attachment['caption'] = caption
@@ -2367,7 +2431,7 @@ class FacebookNotifier(object):
attachment['description'] = subtitle
if self.incl_subject:
- self._post_facebook(subject + ': ' + message, attachment=attachment)
+ self._post_facebook(subject + '\r\n' + message, attachment=attachment)
else:
self._post_facebook(message, attachment=attachment)
@@ -2425,13 +2489,14 @@ class FacebookNotifier(object):
config_option = [{'label': 'Instructions',
'description': 'Step 1: Visit \
Facebook Developers to add a new app using basic setup.
\
- Step 2: Go to Settings > Advanced and fill in \
- Valid OAuth redirect URIs with your PlexPy URL (e.g. http://localhost:8181).
\
- Step 3: Go to App Review and toggle public to Yes.
\
- Step 4: Fill in the PlexPy URL below with the exact same URL from Step 3.
\
- Step 5: Fill in the App ID and App Secret below.
\
- Step 6: Click the Request Authorization button below.
\
- Step 7: Fill in your Group ID below.',
+ Step 2: Click Add Product on the left, then Get Started \
+ for Facebook Login.
\
+ Step 3: Fill in Valid OAuth redirect URIs with your PlexPy URL (e.g. http://localhost:8181).
\
+ Step 4: Click App Review on the left and toggle "make public" to Yes.
\
+ Step 5: Fill in the PlexPy URL below with the exact same URL from Step 3.
\
+ Step 6: Fill in the App ID and App Secret below.
\
+ Step 7: Click the Request Authorization button below.
\
+ Step 8: Fill in your Group ID below.',
'input_type': 'help'
},
{'label': 'PlexPy URL',
@@ -2554,6 +2619,7 @@ class JOIN(object):
def __init__(self):
self.apikey = plexpy.CONFIG.JOIN_APIKEY
self.deviceid = plexpy.CONFIG.JOIN_DEVICEID
+ self.incl_subject = plexpy.CONFIG.JOIN_INCL_SUBJECT
def conf(self, options):
return cherrypy.config['config'].get('PUSHBULLET', options)
@@ -2566,9 +2632,11 @@ class JOIN(object):
data = {'apikey': self.apikey,
deviceid_key: self.deviceid,
- 'title': subject.encode("utf-8"),
'text': message.encode("utf-8")}
+ if self.incl_subject:
+ data['title'] = subject.encode("utf-8")
+
response = requests.post('https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush',
params=data)
request_status = response.status_code
@@ -2649,7 +2717,160 @@ class JOIN(object):
{'label': 'Your Devices IDs',
'description': devices,
'input_type': 'help'
+ },
+ {'label': 'Include Subject Line',
+ 'value': self.incl_subject,
+ 'name': 'join_incl_subject',
+ 'description': 'Include the subject line with the notifications.',
+ 'input_type': 'checkbox'
}
]
return config_option
+
+class HIPCHAT(object):
+
+ def __init__(self):
+ self.apiurl = plexpy.CONFIG.HIPCHAT_URL
+ self.color = plexpy.CONFIG.HIPCHAT_COLOR
+ self.emoticon = plexpy.CONFIG.HIPCHAT_EMOTICON
+ self.incl_pmslink = plexpy.CONFIG.HIPCHAT_INCL_PMSLINK
+ self.incl_poster = plexpy.CONFIG.HIPCHAT_INCL_POSTER
+ self.incl_subject = plexpy.CONFIG.HIPCHAT_INCL_SUBJECT
+
+ def notify(self, message, subject, **kwargs):
+ if not message or not subject:
+ return
+
+ data = {'notify': 'false'}
+
+ text = message.encode('utf-8')
+
+ if self.incl_subject:
+ data['from'] = subject.encode('utf-8')
+
+ if self.color:
+ data['color'] = self.color
+
+ if self.incl_poster and 'metadata' in kwargs:
+ pretty_metadata = PrettyMetadata(kwargs['metadata'])
+ poster_url = pretty_metadata.get_poster_url()
+ poster_link = pretty_metadata.get_poster_link()
+ caption = pretty_metadata.get_caption()
+ title = pretty_metadata.get_title()
+ subtitle = pretty_metadata.get_subtitle()
+ plex_url = pretty_metadata.get_plex_url()
+
+ card = {'title': title,
+ 'format': 'medium',
+ 'style': 'application',
+ 'id': uuid.uuid4().hex,
+ 'activity': {'html': text,
+ 'icon': {'url': poster_url}},
+ 'description': {'format': 'text',
+ 'value': subtitle},
+ 'thumbnail': {'url': poster_url}
+ }
+
+ attributes = []
+ if poster_link:
+ card['url'] = poster_link
+ attributes.append({'value': {'label': caption,
+ 'url': poster_link}})
+ if self.incl_pmslink:
+ attributes.append({'value': {'label': 'View on Plex Web',
+ 'url': plex_url}})
+ if attributes:
+ card['attributes'] = attributes
+
+ data['message'] = text
+ data['card'] = card
+
+ else:
+ if self.emoticon:
+ text = self.emoticon + ' ' + text
+ data['message'] = text
+ data['message_format'] = 'text'
+
+ hiphost = urlparse(self.apiurl).hostname
+ hipfullq = urlparse(self.apiurl).path + '?' + urlparse(self.apiurl).query
+
+ http_handler = HTTPSConnection(hiphost)
+ http_handler.request("POST",
+ hipfullq,
+ headers={'Content-type': "application/json"},
+ body=json.dumps(data))
+ response = http_handler.getresponse()
+ request_status = response.status
+
+ if request_status == 200 or request_status == 204:
+ logger.info(u"PlexPy Notifiers :: Hipchat notification sent.")
+ return True
+ elif request_status >= 400 and request_status < 500:
+ logger.warn(u"PlexPy Notifiers :: Hipchat notification failed: [%s] %s" % (request_status, response.reason))
+ return False
+ else:
+ logger.warn(u"PlexPy Notifiers :: Hipchat notification failed.")
+ return False
+
+ def test(self, apiurl, color, hipchat_emoticon, hipchat_incl_subject):
+
+ self.enabled = True
+ self.apiurl = apiurl
+ self.color = color
+ self.emoticon = hipchat_emoticon
+ self.incl_subject = hipchat_incl_subject
+
+ return self.notify('PlexPy', 'Test Message')
+
+ def return_config_options(self):
+ config_option = [{'label': 'Hipchat Custom Integrations Full URL',
+ 'value': self.apiurl,
+ 'name': 'hipchat_url',
+ 'description': 'Your Hipchat BYO integration URL. You can get a key from'
+ ' here.',
+ 'input_type': 'text'
+ },
+ {'label': 'Hipchat Color',
+ 'value': self.color,
+ 'name': 'hipchat_color',
+ 'description': 'Background color for the message.',
+ 'input_type': 'select',
+ 'select_options': {'': '',
+ 'gray': 'gray',
+ 'green': 'green',
+ 'purple': 'purple',
+ 'random': 'random',
+ 'red': 'red',
+ 'yellow': 'yellow'
+ }
+ },
+ {'label': 'Hipchat Emoticon',
+ 'value': self.emoticon,
+ 'name': 'hipchat_emoticon',
+ 'description': 'Include an emoticon tag at the beginning of text notifications (e.g. (taco)). Leave blank for none.'
+ ' Use a stock emoticon or create a custom emoticon'
+ ' here.',
+ 'input_type': 'text'
+ },
+ {'label': 'Include Poster',
+ 'value': self.incl_poster,
+ 'name': 'hipchat_incl_poster',
+ 'description': 'Include a poster in the notifications.
This will change the notification type to HTML and emoticons will no longer work.',
+ 'input_type': 'checkbox'
+ },
+ {'label': 'Include Link to Plex Web',
+ 'value': self.incl_pmslink,
+ 'name': 'hipchat_incl_pmslink',
+ 'description': 'Include a link to the media in Plex Web with the notifications.',
+ 'input_type': 'checkbox'
+ },
+ {'label': 'Include Subject Line',
+ 'value': self.incl_subject,
+ 'name': 'hipchat_incl_subject',
+ 'description': 'Includes the subject with the notifications.',
+ 'input_type': 'checkbox'
+ }
+ ]
+
+ return config_option
\ No newline at end of file
diff --git a/plexpy/plextv.py b/plexpy/plextv.py
index ee46aa95..f39c6b10 100644
--- a/plexpy/plextv.py
+++ b/plexpy/plextv.py
@@ -17,6 +17,7 @@
# along with PlexPy. If not, see .
import base64
+import json
from xml.dom import minidom
import plexpy
@@ -95,25 +96,31 @@ def get_real_pms_url():
fallback_url = 'http://' + plexpy.CONFIG.PMS_IP + ':' + str(plexpy.CONFIG.PMS_PORT)
- if plexpy.CONFIG.PMS_SSL:
- result = PlexTV().get_server_urls(include_https=True)
- else:
- result = PlexTV().get_server_urls(include_https=False)
+ plex_tv = PlexTV()
+ result = plex_tv.get_server_urls(include_https=plexpy.CONFIG.PMS_SSL)
+ plexpass = plex_tv.get_plexpass_status()
+
+ connections = []
+ if result:
+ plexpy.CONFIG.__setattr__('PMS_VERSION', result['version'])
+ plexpy.CONFIG.__setattr__('PMS_PLATFORM', result['platform'])
+ plexpy.CONFIG.__setattr__('PMS_PLEXPASS', plexpass)
+ connections = result['connections']
# Only need to retrieve PMS_URL if using SSL
if plexpy.CONFIG.PMS_SSL:
- if result:
+ if connections:
if plexpy.CONFIG.PMS_IS_REMOTE:
# Get all remote connections
- connections = [c for c in result if c['local'] == '0' and 'plex.direct' in c['uri']]
+ conns = [c for c in connections if c['local'] == '0' and 'plex.direct' in c['uri']]
else:
# Get all local connections
- connections = [c for c in result if c['local'] == '1' and 'plex.direct' in c['uri']]
+ conns = [c for c in connections if c['local'] == '1' and 'plex.direct' in c['uri']]
- if connections:
+ if conns:
# Get connection with matching address, otherwise return first connection
- conn = next((c for c in connections if c['address'] == plexpy.CONFIG.PMS_IP
- and c['port'] == str(plexpy.CONFIG.PMS_PORT)), connections[0])
+ conn = next((c for c in conns if c['address'] == plexpy.CONFIG.PMS_IP
+ and c['port'] == str(plexpy.CONFIG.PMS_PORT)), conns[0])
plexpy.CONFIG.__setattr__('PMS_URL', conn['uri'])
plexpy.CONFIG.write()
logger.info(u"PlexPy PlexTV :: Server URL retrieved.")
@@ -273,6 +280,18 @@ class PlexTV(object):
return request
+ def get_plextv_downloads(self, plexpass=False, output_format=''):
+ if plexpass:
+ uri = '/api/downloads/1.json?channel=plexpass'
+ else:
+ uri = '/api/downloads/1.json'
+ request = self.request_handler.make_request(uri=uri,
+ proto=self.protocol,
+ request_type='GET',
+ output_format=output_format)
+
+ return request
+
def get_full_users_list(self):
friends_list = self.get_plextv_friends()
own_account = self.get_plextv_user_details()
@@ -454,7 +473,7 @@ class PlexTV(object):
server_id = plexpy.CONFIG.PMS_IDENTIFIER
else:
logger.error(u"PlexPy PlexTV :: Unable to retrieve server identity.")
- return []
+ return {}
plextv_resources = self.get_plextv_resources(include_https=include_https)
@@ -462,22 +481,26 @@ class PlexTV(object):
xml_parse = minidom.parseString(plextv_resources)
except Exception as e:
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s" % e)
- return []
+ return {}
except:
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls.")
- return []
+ return {}
try:
xml_head = xml_parse.getElementsByTagName('Device')
except Exception as e:
logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s." % e)
- return []
+ return {}
# Function to get all connections for a device
def get_connections(device):
conn = []
connections = device.getElementsByTagName('Connection')
+ server = {"platform": helpers.get_xml_attr(device, 'platform'),
+ "version": helpers.get_xml_attr(device, 'productVersion')
+ }
+
for c in connections:
server_details = {"protocol": helpers.get_xml_attr(c, 'protocol'),
"address": helpers.get_xml_attr(c, 'address'),
@@ -487,18 +510,19 @@ class PlexTV(object):
}
conn.append(server_details)
- return conn
+ server['connections'] = conn
+ return server
- server_urls = []
+ server = {}
# Try to match the device
for a in xml_head:
if helpers.get_xml_attr(a, 'clientIdentifier') == server_id:
- server_urls = get_connections(a)
+ server = get_connections(a)
break
# Else no device match found
- if not server_urls:
+ if not server:
# Try to match the PMS_IP and PMS_PORT
for a in xml_head:
if helpers.get_xml_attr(a, 'provides') == 'server':
@@ -511,16 +535,16 @@ class PlexTV(object):
plexpy.CONFIG.PMS_IDENTIFIER = helpers.get_xml_attr(a, 'clientIdentifier')
plexpy.CONFIG.write()
- logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s." % \
- (server_id, plexpy.CONFIG.PMS_IDENTIFIER))
+ logger.info(u"PlexPy PlexTV :: PMS identifier changed from %s to %s."
+ % (server_id, plexpy.CONFIG.PMS_IDENTIFIER))
- server_urls = get_connections(a)
+ server = get_connections(a)
break
- if server_urls:
+ if server.get('connections'):
break
- return server_urls
+ return server
def get_server_times(self):
servers = self.get_plextv_server_list(output_format='xml')
@@ -588,4 +612,81 @@ class PlexTV(object):
}
clean_servers.append(server)
- return clean_servers
\ No newline at end of file
+ return clean_servers
+
+ def get_plex_downloads(self):
+ logger.debug(u"PlexPy PlexTV :: Plex update channel is %s." % plexpy.CONFIG.PMS_UPDATE_CHANNEL)
+ plex_downloads = self.get_plextv_downloads(plexpass=(plexpy.CONFIG.PMS_UPDATE_CHANNEL == 'plexpass'))
+
+ try:
+ available_downloads = json.loads(plex_downloads)
+ except Exception as e:
+ logger.warn(u"PlexPy PlexTV :: Unable to load JSON for get_plex_updates.")
+ return {}
+
+ # Get the updates for the platform
+ platform_downloads = available_downloads.get('computer').get(plexpy.CONFIG.PMS_PLATFORM) or \
+ available_downloads.get('nas').get(plexpy.CONFIG.PMS_PLATFORM)
+
+ if not platform_downloads:
+ logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
+ % plexpy.CONFIG.PMS_PLATFORM)
+ return {}
+
+ v_old = plexpy.CONFIG.PMS_VERSION.split('-')[0].split('.')
+ v_new = platform_downloads.get('version', '').split('-')[0].split('.')
+
+ if len(v_old) < 4:
+ logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
+ % plexpy.CONFIG.PMS_VERSION)
+ return {}
+ if len(v_new) < 4:
+ logger.error(u"PlexPy PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
+ % platform_downloads.get('version'))
+ return {}
+
+ # Compare versions
+ if v_new[0] > v_old[0] or \
+ v_new[0] == v_old[0] and v_new[1] > v_old[1] or \
+ v_new[0] == v_old[0] and v_new[1] == v_old[1] and v_new[2] > v_old[2] or \
+ v_new[0] == v_old[0] and v_new[1] == v_old[1] and v_new[2] == v_old[2] and v_new[3] > v_old[3]:
+ update_available = True
+ else:
+ update_available = False
+
+ # Get proper download
+ releases = platform_downloads.get('releases', [{}])
+ release = next((r for r in releases if r['build'] == plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD), releases[0])
+
+ download_info = {'update_available': update_available,
+ 'platform': platform_downloads.get('name'),
+ 'release_date': platform_downloads.get('release_date'),
+ 'version': platform_downloads.get('version'),
+ 'requirements': platform_downloads.get('requirements'),
+ 'extra_info': platform_downloads.get('extra_info'),
+ 'changelog_added': platform_downloads.get('items_added'),
+ 'changelog_fixed': platform_downloads.get('items_fixed'),
+ 'label': release.get('label'),
+ 'distro': release.get('distro'),
+ 'distro_build': release.get('build'),
+ 'download_url': release.get('url'),
+ }
+
+ return download_info
+
+ def get_plexpass_status(self):
+ account_data = self.get_plextv_user_details(output_format='xml')
+
+ try:
+ subscription = account_data.getElementsByTagName('subscription')
+ except Exception as e:
+ logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_plexpass_status: %s." % e)
+ return False
+
+ if subscription and helpers.get_xml_attr(subscription[0], 'active') == '1':
+ return True
+ else:
+ logger.debug(u"PlexPy PlexTV :: Plex Pass subscription not found.")
+ plexpy.CONFIG.__setattr__('PMS_PLEXPASS', 0)
+ plexpy.CONFIG.write()
+ return False
\ No newline at end of file
diff --git a/plexpy/users.py b/plexpy/users.py
index ef530e55..949c9f44 100644
--- a/plexpy/users.py
+++ b/plexpy/users.py
@@ -181,8 +181,8 @@ class Users(object):
'session_history_media_info.transcode_decision',
'session_history.user',
'session_history.user_id as custom_user_id',
- '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE \
- users.friendly_name END) AS friendly_name'
+ '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
+ THEN users.username ELSE users.friendly_name END) AS friendly_name'
]
try:
@@ -717,8 +717,8 @@ class Users(object):
'user_login.host',
'user_login.user_agent',
'user_login.timestamp',
- '(CASE WHEN users.friendly_name IS NULL THEN user_login.user ELSE users.friendly_name END) \
- AS friendly_name'
+ '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
+ THEN users.username ELSE users.friendly_name END) AS friendly_name'
]
try:
diff --git a/plexpy/version.py b/plexpy/version.py
index cfdffc6b..2da51443 100644
--- a/plexpy/version.py
+++ b/plexpy/version.py
@@ -1,2 +1,2 @@
PLEXPY_VERSION = "master"
-PLEXPY_RELEASE_VERSION = "1.4.6"
+PLEXPY_RELEASE_VERSION = "1.4.7"
diff --git a/plexpy/webserve.py b/plexpy/webserve.py
index f4ff17c2..363eedbb 100644
--- a/plexpy/webserve.py
+++ b/plexpy/webserve.py
@@ -269,7 +269,7 @@ class WebInterface(object):
else:
if s['video_decision'] == 'transcode' or s['audio_decision'] == 'transcode':
data['transcode'] += 1
- elif s['video_decision'] == 'direct copy' or s['audio_decision'] == 'copy play':
+ elif s['video_decision'] == 'copy' or s['audio_decision'] == 'copy':
data['direct_stream'] += 1
else:
data['direct_play'] += 1
@@ -2491,7 +2491,7 @@ class WebInterface(object):
@cherrypy.expose
@requireAuth(member_of("admin"))
- def settings(self):
+ def settings(self, **kwargs):
interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
interface_list = [name for name in os.listdir(interface_dir) if
os.path.isdir(os.path.join(interface_dir, name))]
@@ -2569,6 +2569,8 @@ class WebInterface(object):
"notify_recently_added": checked(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED),
"notify_recently_added_grandparent": checked(plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT),
"notify_recently_added_delay": plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_DELAY,
+ "notify_concurrent_by_ip": plexpy.CONFIG.NOTIFY_CONCURRENT_BY_IP,
+ "notify_concurrent_threshold": plexpy.CONFIG.NOTIFY_CONCURRENT_THRESHOLD,
"notify_watched_percent": plexpy.CONFIG.NOTIFY_WATCHED_PERCENT,
"notify_on_start_subject_text": plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT,
"notify_on_start_body_text": plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT,
@@ -2594,6 +2596,10 @@ class WebInterface(object):
"notify_on_intup_body_text": plexpy.CONFIG.NOTIFY_ON_INTUP_BODY_TEXT,
"notify_on_pmsupdate_subject_text": plexpy.CONFIG.NOTIFY_ON_PMSUPDATE_SUBJECT_TEXT,
"notify_on_pmsupdate_body_text": plexpy.CONFIG.NOTIFY_ON_PMSUPDATE_BODY_TEXT,
+ "notify_on_concurrent_subject_text": plexpy.CONFIG.NOTIFY_ON_CONCURRENT_SUBJECT_TEXT,
+ "notify_on_concurrent_body_text": plexpy.CONFIG.NOTIFY_ON_CONCURRENT_BODY_TEXT,
+ "notify_on_newdevice_subject_text": plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_SUBJECT_TEXT,
+ "notify_on_newdevice_body_text": plexpy.CONFIG.NOTIFY_ON_NEWDEVICE_BODY_TEXT,
"notify_scripts_args_text": plexpy.CONFIG.NOTIFY_SCRIPTS_ARGS_TEXT,
"home_sections": json.dumps(plexpy.CONFIG.HOME_SECTIONS),
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
@@ -2606,10 +2612,11 @@ class WebInterface(object):
"group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES),
"git_token": plexpy.CONFIG.GIT_TOKEN,
"imgur_client_id": plexpy.CONFIG.IMGUR_CLIENT_ID,
- "cache_images": checked(plexpy.CONFIG.CACHE_IMAGES)
+ "cache_images": checked(plexpy.CONFIG.CACHE_IMAGES),
+ "pms_version": plexpy.CONFIG.PMS_VERSION
}
- return serve_template(templatename="settings.html", title="Settings", config=config)
+ return serve_template(templatename="settings.html", title="Settings", config=config, kwargs=kwargs)
@cherrypy.expose
@cherrypy.tools.json_out()
@@ -2764,11 +2771,27 @@ class WebInterface(object):
else:
return {'result': 'error', 'message': 'Config backup failed.'}
+ @cherrypy.expose
+ @requireAuth(member_of("admin"))
+ def get_configuration_table(self, **kwargs):
+ return serve_template(templatename="configuration_table.html")
+
@cherrypy.expose
@requireAuth(member_of("admin"))
def get_scheduler_table(self, **kwargs):
return serve_template(templatename="scheduler_table.html")
+ @cherrypy.expose
+ @cherrypy.tools.json_out()
+ @requireAuth(member_of("admin"))
+ def get_server_update_params(self):
+ plex_tv = plextv.PlexTV()
+ plexpass = plex_tv.get_plexpass_status()
+ return {'plexpass': plexpass,
+ 'pms_platform': plexpy.CONFIG.PMS_PLATFORM,
+ 'pms_update_channel': plexpy.CONFIG.PMS_UPDATE_CHANNEL,
+ 'pms_update_distro_build': plexpy.CONFIG.PMS_UPDATE_DISTRO_BUILD}
+
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
@@ -2782,6 +2805,34 @@ class WebInterface(object):
else:
return {'result': 'error', 'message': 'Database backup failed.'}
+ @cherrypy.expose
+ @cherrypy.tools.json_out()
+ @requireAuth(member_of("admin"))
+ @addtoapi()
+ def install_geoip_db(self):
+ """ Downloads and installs the GeoLite2 database """
+
+ result = helpers.install_geoip_db()
+
+ if result:
+ return {'result': 'success', 'message': 'GeoLite2 database installed successful.'}
+ else:
+ return {'result': 'error', 'message': 'GeoLite2 database install failed.'}
+
+ @cherrypy.expose
+ @cherrypy.tools.json_out()
+ @requireAuth(member_of("admin"))
+ @addtoapi()
+ def uninstall_geoip_db(self):
+ """ Uninstalls the GeoLite2 database """
+
+ result = helpers.uninstall_geoip_db()
+
+ if result:
+ return {'result': 'success', 'message': 'GeoLite2 database uninstalled successfully.'}
+ else:
+ return {'result': 'error', 'message': 'GeoLite2 database uninstall failed.'}
+
@cherrypy.expose
@requireAuth(member_of("admin"))
def get_notification_agent_config(self, agent_id, **kwargs):
@@ -2833,6 +2884,7 @@ class WebInterface(object):
10 # Email
16 # Facebook
0 # Growl
+ 19 # Hipchat
12 # IFTTT
18 # Join
4 # NotifyMyAndroid
@@ -3217,7 +3269,9 @@ class WebInterface(object):
logger.error('No image input received.')
return
- refresh = True if refresh == 'true' else False
+ if refresh:
+ mo = member_of('admin')
+ refresh = True if mo() else False
if rating_key and not img:
img = '/library/metadata/%s/thumb/1337' % rating_key
@@ -4202,7 +4256,7 @@ class WebInterface(object):
'Can you hurry up. My horse is getting tired.',
'What killed the dinosaurs? The Ice Age!',
'That\'s for sleeping with my wife!',
- 'Remember when I said I’d kill you last... I lied!',
+ 'Remember when I said I\'d kill you last... I lied!',
'You want to be a farmer? Here\'s a couple of acres',
'Now, this is the plan. Get your ass to Mars.',
'I just had a terrible thought... What if this is a dream?'
@@ -4229,3 +4283,39 @@ class WebInterface(object):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_update_staus()
return result
+
+ @cherrypy.expose
+ @cherrypy.tools.json_out()
+ @requireAuth()
+ @addtoapi()
+ def get_geoip_lookup(self, ip_address='', **kwargs):
+ """ Get the geolocation info for an IP address. The GeoLite2 database must be installed.
+
+ ```
+ Required parameters:
+ ip_address
+
+ Optional parameters:
+ None
+
+ Returns:
+ json:
+ {"continent": "North America",
+ "country": "United States",
+ "region": "California",
+ "city": "Mountain View",
+ "postal_code": "94035",
+ "timezone": "America/Los_Angeles",
+ "latitude": 37.386,
+ "longitude": -122.0838,
+ "accuracy": 1000
+ }
+ json:
+ {"error": "The address 127.0.0.1 is not in the database."
+ }
+ ```
+ """
+ geo_info = helpers.geoip_lookup(ip_address)
+ if isinstance(geo_info, basestring):
+ return {'error': geo_info}
+ return geo_info