Add ipwhois library + dependencies
This commit is contained in:
parent 43bd49ce5b
commit 469d22a833

104 changed files with 21349 additions and 2417 deletions
29
lib/ipwhois/__init__.py
Normal file
@@ -0,0 +1,29 @@
# Copyright (c) 2013, 2014, 2015, 2016 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

__version__ = '0.13.0'

from .exceptions import *
from .net import Net
from .ipwhois import IPWhois
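
For orientation, __init__.py pins the bundled ipwhois version and re-exports the package's public names. A minimal usage sketch of the legacy whois path exposed here (the address below is just an illustrative public IP, not part of the diff):

from ipwhois import IPWhois

obj = IPWhois('74.125.225.229')
results = obj.lookup_whois()  # port 43 whois; see lookup_rdap() further down
print(results['asn'], results['asn_registry'], results['nets'][0]['name'])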
252
lib/ipwhois/data/iso_3166-1.csv
Normal file
@@ -0,0 +1,252 @@
AD,Andorra,
AE,United Arab Emirates,
AF,Afghanistan,
AG,Antigua and Barbuda,
AI,Anguilla,
AL,Albania,
AM,Armenia,
AN,Netherlands Antilles,
AO,Angola,
AP,"Asia/Pacific Region",
AQ,Antarctica,
AR,Argentina,
AS,American Samoa,
AT,Austria,
AU,Australia,
AW,Aruba,
AX,Aland Islands,
AZ,Azerbaijan,
BA,Bosnia and Herzegovina,
BB,Barbados,
BD,Bangladesh,
BE,Belgium,
BF,Burkina Faso,
BG,Bulgaria,
BH,Bahrain,
BI,Burundi,
BJ,Benin,
BL,Saint Bartelemey,
BM,Bermuda,
BN,Brunei Darussalam,
BO,Bolivia,
BQ,"Bonaire, Saint Eustatius and Saba",
BR,Brazil,
BS,Bahamas,
BT,Bhutan,
BV,Bouvet Island,
BW,Botswana,
BY,Belarus,
BZ,Belize,
CA,Canada,
CC,Cocos (Keeling) Islands,
CD,"Congo, The Democratic Republic of the",
CF,Central African Republic,
CG,Congo,
CH,Switzerland,
CI,Cote d'Ivoire,
CK,Cook Islands,
CL,Chile,
CM,Cameroon,
CN,China,
CO,Colombia,
CR,Costa Rica,
CU,Cuba,
CV,Cape Verde,
CW,Curacao,
CX,Christmas Island,
CY,Cyprus,
CZ,Czech Republic,
DE,Germany,
DJ,Djibouti,
DK,Denmark,
DM,Dominica,
DO,Dominican Republic,
DZ,Algeria,
EC,Ecuador,
EE,Estonia,
EG,Egypt,
EH,Western Sahara,
ER,Eritrea,
ES,Spain,
ET,Ethiopia,
EU,Europe,
FI,Finland,
FJ,Fiji,
FK,Falkland Islands (Malvinas),
FM,"Micronesia, Federated States of",
FO,Faroe Islands,
FR,France,
GA,Gabon,
GB,United Kingdom,
GD,Grenada,
GE,Georgia,
GF,French Guiana,
GG,Guernsey,
GH,Ghana,
GI,Gibraltar,
GL,Greenland,
GM,Gambia,
GN,Guinea,
GP,Guadeloupe,
GQ,Equatorial Guinea,
GR,Greece,
GS,South Georgia and the South Sandwich Islands,
GT,Guatemala,
GU,Guam,
GW,Guinea-Bissau,
GY,Guyana,
HK,Hong Kong,
HM,Heard Island and McDonald Islands,
HN,Honduras,
HR,Croatia,
HT,Haiti,
HU,Hungary,
ID,Indonesia,
IE,Ireland,
IL,Israel,
IM,Isle of Man,
IN,India,
IO,British Indian Ocean Territory,
IQ,Iraq,
IR,"Iran, Islamic Republic of",
IS,Iceland,
IT,Italy,
JE,Jersey,
JM,Jamaica,
JO,Jordan,
JP,Japan,
KE,Kenya,
KG,Kyrgyzstan,
KH,Cambodia,
KI,Kiribati,
KM,Comoros,
KN,Saint Kitts and Nevis,
KP,"Korea, Democratic People's Republic of",
KR,"Korea, Republic of",
KW,Kuwait,
KY,Cayman Islands,
KZ,Kazakhstan,
LA,Lao People's Democratic Republic,
LB,Lebanon,
LC,Saint Lucia,
LI,Liechtenstein,
LK,Sri Lanka,
LR,Liberia,
LS,Lesotho,
LT,Lithuania,
LU,Luxembourg,
LV,Latvia,
LY,Libyan Arab Jamahiriya,
MA,Morocco,
MC,Monaco,
MD,"Moldova, Republic of",
ME,Montenegro,
MF,Saint Martin,
MG,Madagascar,
MH,Marshall Islands,
MK,Macedonia,
ML,Mali,
MM,Myanmar,
MN,Mongolia,
MO,Macao,
MP,Northern Mariana Islands,
MQ,Martinique,
MR,Mauritania,
MS,Montserrat,
MT,Malta,
MU,Mauritius,
MV,Maldives,
MW,Malawi,
MX,Mexico,
MY,Malaysia,
MZ,Mozambique,
NA,Namibia,
NC,New Caledonia,
NE,Niger,
NF,Norfolk Island,
NG,Nigeria,
NI,Nicaragua,
NL,Netherlands,
NO,Norway,
NP,Nepal,
NR,Nauru,
NU,Niue,
NZ,New Zealand,
OM,Oman,
PA,Panama,
PE,Peru,
PF,French Polynesia,
PG,Papua New Guinea,
PH,Philippines,
PK,Pakistan,
PL,Poland,
PM,Saint Pierre and Miquelon,
PN,Pitcairn,
PR,Puerto Rico,
PS,Palestinian Territory,
PT,Portugal,
PW,Palau,
PY,Paraguay,
QA,Qatar,
RE,Reunion,
RO,Romania,
RS,Serbia,
RU,Russian Federation,
RW,Rwanda,
SA,Saudi Arabia,
SB,Solomon Islands,
SC,Seychelles,
SD,Sudan,
SE,Sweden,
SG,Singapore,
SH,Saint Helena,
SI,Slovenia,
SJ,Svalbard and Jan Mayen,
SK,Slovakia,
SL,Sierra Leone,
SM,San Marino,
SN,Senegal,
SO,Somalia,
SR,Suriname,
SS,South Sudan,
ST,Sao Tome and Principe,
SV,El Salvador,
SX,Sint Maarten,
SY,Syrian Arab Republic,
SZ,Swaziland,
TC,Turks and Caicos Islands,
TD,Chad,
TF,French Southern Territories,
TG,Togo,
TH,Thailand,
TJ,Tajikistan,
TK,Tokelau,
TL,Timor-Leste,
TM,Turkmenistan,
TN,Tunisia,
TO,Tonga,
TR,Turkey,
TT,Trinidad and Tobago,
TV,Tuvalu,
TW,Taiwan,
TZ,"Tanzania, United Republic of",
UA,Ukraine,
UG,Uganda,
UM,United States Minor Outlying Islands,
US,United States,
UY,Uruguay,
UZ,Uzbekistan,
VA,Holy See (Vatican City State),
VC,Saint Vincent and the Grenadines,
VE,Venezuela,
VG,"Virgin Islands, British",
VI,"Virgin Islands, U.S.",
VN,Vietnam,
VU,Vanuatu,
WF,Wallis and Futuna,
WS,Samoa,
YE,Yemen,
YT,Mayotte,
ZA,South Africa,
ZM,Zambia,
ZW,Zimbabwe,
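
The CSV has no header row; each record is a two-letter code, a name, and a trailing empty column. A minimal sketch of reading it into a code-to-name dict (standalone usage with an assumed relative path, not how the library itself consumes the file):

import csv

with open('lib/ipwhois/data/iso_3166-1.csv', newline='') as f:
    countries = {row[0]: row[1] for row in csv.reader(f)}

print(countries['DE'])  # -> 'Germany'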
1003
lib/ipwhois/data/iso_3166-1_list_en.xml
Normal file
File diff suppressed because it is too large
102
lib/ipwhois/exceptions.py
Normal file
@@ -0,0 +1,102 @@
# Copyright (c) 2013, 2014, 2015, 2016 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.


class NetError(Exception):
    """
    An Exception for when a parameter provided is not an instance of
    ipwhois.net.Net.
    """


class IPDefinedError(Exception):
    """
    An Exception for when the IP is defined (does not need to be resolved).
    """


class ASNLookupError(Exception):
    """
    An Exception for when the ASN lookup failed.
    """


class ASNRegistryError(Exception):
    """
    An Exception for when the ASN registry does not match one of the five
    expected values (arin, ripencc, apnic, lacnic, afrinic).
    """


class HostLookupError(Exception):
    """
    An Exception for when the host lookup failed.
    """


class BlacklistError(Exception):
    """
    An Exception for when the server is in a blacklist.
    """


class WhoisLookupError(Exception):
    """
    An Exception for when the whois lookup failed.
    """


class HTTPLookupError(Exception):
    """
    An Exception for when the RDAP lookup failed.
    """


class HTTPRateLimitError(Exception):
    """
    An Exception for when HTTP queries exceed the NIC's request limit and have
    exhausted all retries.
    """


class InvalidEntityContactObject(Exception):
    """
    An Exception for when JSON output is not an RDAP entity contact information
    object:
    https://tools.ietf.org/html/rfc7483#section-5.4
    """


class InvalidNetworkObject(Exception):
    """
    An Exception for when JSON output is not an RDAP network object:
    https://tools.ietf.org/html/rfc7483#section-5.4
    """


class InvalidEntityObject(Exception):
    """
    An Exception for when JSON output is not an RDAP entity object:
    https://tools.ietf.org/html/rfc7483#section-5.1
    """
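
These classes let callers branch on the specific failure mode rather than a generic Exception. A minimal handling sketch (the address is illustrative):

from ipwhois import IPWhois
from ipwhois.exceptions import (IPDefinedError, ASNRegistryError,
                                HTTPLookupError, HTTPRateLimitError)

try:
    results = IPWhois('74.125.225.229').lookup_rdap()
except IPDefinedError:
    pass  # private/reserved address; nothing to resolve
except ASNRegistryError:
    pass  # registry was not one of the five known RIRs
except (HTTPLookupError, HTTPRateLimitError):
    pass  # RDAP endpoint failed, or rate limited after retries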
355
lib/ipwhois/hr.py
Normal file
@@ -0,0 +1,355 @@
# Copyright (c) 2013, 2014, 2015, 2016 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

# TODO: Add '_links' for RFC/other references

HR_ASN = {
    'asn': {
        '_short': 'ASN',
        '_name': 'Autonomous System Number',
        '_description': 'Globally unique identifier used for routing '
                        'information exchange with Autonomous Systems.'
    },
    'asn_cidr': {
        '_short': 'ASN CIDR Block',
        '_name': 'ASN Classless Inter-Domain Routing Block',
        '_description': 'Network routing block assigned to an ASN.'
    },
    'asn_country_code': {
        '_short': 'ASN Country Code',
        '_name': 'ASN Assigned Country Code',
        '_description': 'ASN assigned country code in ISO 3166-1 format.'
    },
    'asn_date': {
        '_short': 'ASN Date',
        '_name': 'ASN Allocation Date',
        '_description': 'ASN allocation date in ISO 8601 format.'
    },
    'asn_registry': {
        '_short': 'ASN Registry',
        '_name': 'ASN Assigned Registry',
        '_description': 'ASN assigned regional internet registry.'
    }
}

HR_RDAP_COMMON = {
    'entities': {
        '_short': 'Entities',
        '_name': 'RIR Object Entities',
        '_description': 'List of object names referenced by an RIR object.'
    },
    'events': {
        '_short': 'Events',
        '_name': 'Events',
        '_description': 'Events for an RIR object.',
        'action': {
            '_short': 'Action',
            '_name': 'Event Action (Reason)',
            '_description': 'The reason for an event.'
        },
        'timestamp': {
            '_short': 'Timestamp',
            '_name': 'Event Timestamp',
            '_description': 'The date an event occurred in ISO 8601 '
                            'format.'
        },
        'actor': {
            '_short': 'Actor',
            '_name': 'Event Actor',
            '_description': 'The identifier for an event initiator.'
        }
    },
    'handle': {
        '_short': 'Handle',
        '_name': 'RIR Handle',
        '_description': 'Unique identifier for a registered object.'
    },
    'links': {
        '_short': 'Links',
        '_name': 'Links',
        '_description': 'HTTP/HTTPS links provided for an RIR object.'
    },
    'notices': {
        '_short': 'Notices',
        '_name': 'Notices',
        '_description': 'Notices for an RIR object.',
        'description': {
            '_short': 'Description',
            '_name': 'Notice Description',
            '_description': 'The description/body of a notice.'
        },
        'title': {
            '_short': 'Title',
            '_name': 'Notice Title',
            '_description': 'The title/header for a notice.'
        },
        'links': {
            '_short': 'Links',
            '_name': 'Notice Links',
            '_description': 'HTTP/HTTPS links provided for a notice.'
        }
    },
    'remarks': {
        '_short': 'Remarks',
        '_name': 'Remarks',
        '_description': 'Remarks for an RIR object.',
        'description': {
            '_short': 'Description',
            '_name': 'Remark Description',
            '_description': 'The description/body of a remark.'
        },
        'title': {
            '_short': 'Title',
            '_name': 'Remark Title',
            '_description': 'The title/header for a remark.'
        },
        'links': {
            '_short': 'Links',
            '_name': 'Remark Links',
            '_description': 'HTTP/HTTPS links provided for a remark.'
        }
    },
    'status': {
        '_short': 'Status',
        '_name': 'Object Status',
        '_description': 'List indicating the state of a registered object.'
    }
}

HR_RDAP = {
    'network': {
        '_short': 'Network',
        '_name': 'RIR Network',
        '_description': 'The assigned network for an IP address.',
        'cidr': {
            '_short': 'CIDR Block',
            '_name': 'Classless Inter-Domain Routing Block',
            '_description': 'Network routing block an IP address belongs to.'
        },
        'country': {
            '_short': 'Country Code',
            '_name': 'Country Code',
            '_description': 'Country code registered with the RIR in '
                            'ISO 3166-1 format.'
        },
        'end_address': {
            '_short': 'End Address',
            '_name': 'Ending IP Address',
            '_description': 'The last IP address in a network block.'
        },
        'events': HR_RDAP_COMMON['events'],
        'handle': HR_RDAP_COMMON['handle'],
        'ip_version': {
            '_short': 'IP Version',
            '_name': 'IP Protocol Version',
            '_description': 'The IP protocol version (v4 or v6) of an IP '
                            'address.'
        },
        'links': HR_RDAP_COMMON['links'],
        'name': {
            '_short': 'Name',
            '_name': 'RIR Network Name',
            '_description': 'The identifier assigned to the network '
                            'registration for an IP address.'
        },
        'notices': HR_RDAP_COMMON['notices'],
        'parent_handle': {
            '_short': 'Parent Handle',
            '_name': 'RIR Parent Handle',
            '_description': 'Unique identifier for the parent network of '
                            'a registered network.'
        },
        'remarks': HR_RDAP_COMMON['remarks'],
        'start_address': {
            '_short': 'Start Address',
            '_name': 'Starting IP Address',
            '_description': 'The first IP address in a network block.'
        },
        'status': HR_RDAP_COMMON['status'],
        'type': {
            '_short': 'Type',
            '_name': 'RIR Network Type',
            '_description': 'The RIR classification of a registered network.'
        }
    },
    'entities': HR_RDAP_COMMON['entities'],
    'objects': {
        '_short': 'Objects',
        '_name': 'RIR Objects',
        '_description': 'The objects (entities) referenced by an RIR network.',
        'contact': {
            '_short': 'Contact',
            '_name': 'Contact Information',
            '_description': 'Contact information registered with an RIR '
                            'object.',
            'address': {
                '_short': 'Address',
                '_name': 'Postal Address',
                '_description': 'The contact postal address.'
            },
            'email': {
                '_short': 'Email',
                '_name': 'Email Address',
                '_description': 'The contact email address.'
            },
            'kind': {
                '_short': 'Kind',
                '_name': 'Kind',
                '_description': 'The contact information kind (individual, '
                                'group, org, etc).'
            },
            'name': {
                '_short': 'Name',
                '_name': 'Name',
                '_description': 'The contact name.'
            },
            'phone': {
                '_short': 'Phone',
                '_name': 'Phone Number',
                '_description': 'The contact phone number.'
            },
            'role': {
                '_short': 'Role',
                '_name': 'Role',
                '_description': 'The contact\'s role.'
            },
            'title': {
                '_short': 'Title',
                '_name': 'Title',
                '_description': 'The contact\'s position or job title.'
            }
        },
        'entities': HR_RDAP_COMMON['entities'],
        'events': HR_RDAP_COMMON['events'],
        'events_actor': {
            '_short': 'Events Misc',
            '_name': 'Events w/o Actor',
            '_description': 'An event for an RIR object with no event actor.',
            'action': {
                '_short': 'Action',
                '_name': 'Event Action (Reason)',
                '_description': 'The reason for an event.'
            },
            'timestamp': {
                '_short': 'Timestamp',
                '_name': 'Event Timestamp',
                '_description': 'The date an event occurred in ISO 8601 '
                                'format.'
            }
        },
        'handle': HR_RDAP_COMMON['handle'],
        'links': HR_RDAP_COMMON['links'],
        'notices': HR_RDAP_COMMON['notices'],
        'remarks': HR_RDAP_COMMON['remarks'],
        'roles': {
            '_short': 'Roles',
            '_name': 'Roles',
            '_description': 'List of roles assigned to a registered object.'
        },
        'status': HR_RDAP_COMMON['status'],
    }
}

HR_WHOIS = {
    'nets': {
        '_short': 'Network',
        '_name': 'RIR Network',
        '_description': 'The assigned network for an IP address. May be a '
                        'parent or child network.',
        'address': {
            '_short': 'Address',
            '_name': 'Postal Address',
            '_description': 'The contact postal address.'
        },
        'cidr': {
            '_short': 'CIDR Blocks',
            '_name': 'Classless Inter-Domain Routing Blocks',
            '_description': 'Network routing blocks an IP address belongs to.'
        },
        'city': {
            '_short': 'City',
            '_name': 'City',
            '_description': 'The city registered with a whois network.'
        },
        'country': {
            '_short': 'Country Code',
            '_name': 'Country Code',
            '_description': 'Country code registered for the network in '
                            'ISO 3166-1 format.'
        },
        'created': {
            '_short': 'Created',
            '_name': 'Created Timestamp',
            '_description': 'The date the network was created in ISO 8601 '
                            'format.'
        },
        'description': {
            '_short': 'Description',
            '_name': 'Description',
            '_description': 'The description for the network.'
        },
        'emails': {
            '_short': 'Emails',
            '_name': 'Email Addresses',
            '_description': 'The contact email addresses.'
        },
        'handle': {
            '_short': 'Handle',
            '_name': 'RIR Network Handle',
            '_description': 'Unique identifier for a registered network.'
        },
        'name': {
            '_short': 'Name',
            '_name': 'RIR Network Name',
            '_description': 'The identifier assigned to the network '
                            'registration for an IP address.'
        },
        'postal_code': {
            '_short': 'Postal',
            '_name': 'Postal Code',
            '_description': 'The postal code registered with a whois network.'
        },
        'range': {
            '_short': 'Ranges',
            '_name': 'CIDR Block Ranges',
            '_description': 'Network routing blocks an IP address belongs to.'
        },
        'state': {
            '_short': 'State',
            '_name': 'State',
            '_description': 'The state registered with a whois network.'
        },
        'updated': {
            '_short': 'Updated',
            '_name': 'Updated Timestamp',
            '_description': 'The date the network was updated in ISO 8601 '
                            'format.'
        }
    },
    'referral': {
        '_short': 'Referral',
        '_name': 'Referral Whois',
        '_description': 'The referral whois data if referenced and enabled.',
    }
}
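
These dicts carry display metadata only; rendering is left to the caller. A minimal sketch that prints a label line for each ASN result field:

from ipwhois.hr import HR_ASN

for key, meta in sorted(HR_ASN.items()):
    print('{0} ({1}): {2}'.format(meta['_name'], meta['_short'],
                                  meta['_description']))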
244
lib/ipwhois/ipwhois.py
Normal file
@@ -0,0 +1,244 @@
# Copyright (c) 2013, 2014, 2015, 2016 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

from . import Net
import logging

log = logging.getLogger(__name__)


class IPWhois:
    """
    The wrapper class for performing whois/RDAP lookups and parsing for
    IPv4 and IPv6 addresses.

    Args:
        address: An IPv4 or IPv6 address as a string, integer, IPv4Address,
            or IPv6Address.
        timeout: The default timeout for socket connections in seconds.
        proxy_opener: The urllib.request.OpenerDirector request for proxy
            support or None.
        allow_permutations: Allow net.Net() to use additional methods if DNS
            lookups to Cymru fail.
    """

    def __init__(self, address, timeout=5, proxy_opener=None,
                 allow_permutations=True):

        self.net = Net(
            address=address, timeout=timeout, proxy_opener=proxy_opener,
            allow_permutations=allow_permutations
        )

        self.address = self.net.address
        self.timeout = self.net.timeout
        self.address_str = self.net.address_str
        self.version = self.net.version
        self.reversed = self.net.reversed
        self.dns_zone = self.net.dns_zone

    def __repr__(self):

        return 'IPWhois({0}, {1}, {2})'.format(
            self.address_str, str(self.timeout), repr(self.net.opener)
        )

    def lookup(self, *args, **kwargs):
        """
        Temporary wrapper for legacy whois lookups (moved to
        IPWhois.lookup_whois()). This will be removed in a future
        release (TBD).
        """

        from warnings import warn
        warn("IPWhois.lookup() has been deprecated and will be removed. "
             "You should now use IPWhois.lookup_whois() for legacy whois "
             "lookups.")
        return self.lookup_whois(*args, **kwargs)

    def lookup_whois(self, inc_raw=False, retry_count=3, get_referral=False,
                     extra_blacklist=None, ignore_referral_errors=False,
                     field_list=None, asn_alts=None, extra_org_map=None):
        """
        The function for retrieving and parsing whois information for an IP
        address via port 43 (WHOIS).

        Args:
            inc_raw: Boolean for whether to include the raw whois results in
                the returned dictionary.
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            get_referral: Boolean for whether to retrieve referral whois
                information, if available.
            extra_blacklist: A list of blacklisted whois servers in addition
                to the global BLACKLIST.
            ignore_referral_errors: Boolean for whether to ignore and continue
                when an exception is encountered on referral whois lookups.
            field_list: If provided, a list of fields to parse:
                ['name', 'handle', 'description', 'country', 'state', 'city',
                'address', 'postal_code', 'emails', 'created', 'updated']
            asn_alts: Array of additional lookup types to attempt if the
                ASN DNS lookup fails. allow_permutations must be enabled.
                Defaults to all: ['whois', 'http'].
            extra_org_map: Dictionary mapping org handles to RIRs. This is for
                limited cases where ARIN REST (ASN fallback HTTP lookup) does
                not show an RIR as the org handle, e.g., DNIC (which is now in
                the built-in ORG_MAP): {'DNIC': 'arin'}. Valid RIR values are
                (note the case sensitivity; these are meant to match the REST
                result): 'ARIN', 'RIPE', 'apnic', 'lacnic', 'afrinic'

        Returns:
            Dictionary:

            :query: The IP address (String)
            :asn: The Autonomous System Number (String)
            :asn_date: The ASN Allocation date (String)
            :asn_registry: The assigned ASN registry (String)
            :asn_cidr: The assigned ASN CIDR (String)
            :asn_country_code: The assigned ASN country code (String)
            :nets: Dictionaries containing network information which consists
                of the fields listed in the ipwhois.whois.RIR_WHOIS
                dictionary. (List)
            :raw: Raw whois results if the inc_raw parameter is True. (String)
            :referral: Dictionary of referral whois information if
                get_referral is True and the server isn't blacklisted.
                Consists of fields listed in the ipwhois.whois.RWHOIS
                dictionary.
            :raw_referral: Raw referral whois results if the inc_raw parameter
                is True. (String)
        """

        from .whois import Whois

        # Create the return dictionary.
        results = {}

        # Retrieve the ASN information.
        log.debug('ASN lookup for {0}'.format(self.address_str))
        asn_data, response = self.net.lookup_asn(
            retry_count=retry_count, asn_alts=asn_alts,
            extra_org_map=extra_org_map
        )

        # Add the ASN information to the return dictionary.
        results.update(asn_data)

        # Retrieve the whois data and parse.
        whois = Whois(self.net)
        log.debug('WHOIS lookup for {0}'.format(self.address_str))
        whois_data = whois.lookup(
            inc_raw=inc_raw, retry_count=retry_count, response=response,
            get_referral=get_referral, extra_blacklist=extra_blacklist,
            ignore_referral_errors=ignore_referral_errors, asn_data=asn_data,
            field_list=field_list
        )

        # Add the whois information to the return dictionary.
        results.update(whois_data)

        return results

    def lookup_rdap(self, inc_raw=False, retry_count=3, depth=0,
                    excluded_entities=None, bootstrap=False,
                    rate_limit_timeout=120, asn_alts=None, extra_org_map=None):
        """
        The function for retrieving and parsing whois information for an IP
        address via HTTP (RDAP).

        **This is now the recommended method, as RDAP contains much better
        information to parse.**

        Args:
            inc_raw: Boolean for whether to include the raw whois results in
                the returned dictionary.
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            depth: How many levels deep to run queries when additional
                referenced objects are found.
            excluded_entities: A list of entity handles to not perform
                lookups on.
            bootstrap: If True, performs lookups via ARIN bootstrap rather
                than lookups based on ASN data. ASN lookups are not performed
                and no output for any of the asn* fields is provided.
            rate_limit_timeout: The number of seconds to wait before retrying
                when a rate limit notice is returned via rdap+json.
            asn_alts: Array of additional lookup types to attempt if the
                ASN DNS lookup fails. allow_permutations must be enabled.
                Defaults to all: ['whois', 'http'].
            extra_org_map: Dictionary mapping org handles to RIRs. This is for
                limited cases where ARIN REST (ASN fallback HTTP lookup) does
                not show an RIR as the org handle, e.g., DNIC (which is now in
                the built-in ORG_MAP): {'DNIC': 'arin'}. Valid RIR values are
                (note the case sensitivity; these are meant to match the REST
                result): 'ARIN', 'RIPE', 'apnic', 'lacnic', 'afrinic'

        Returns:
            Dictionary:

            :query: The IP address (String)
            :asn: The Autonomous System Number (String)
            :asn_date: The ASN Allocation date (String)
            :asn_registry: The assigned ASN registry (String)
            :asn_cidr: The assigned ASN CIDR (String)
            :asn_country_code: The assigned ASN country code (String)
            :entities: List of entity handles referred by the top level query.
            :network: Dictionary containing network information which consists
                of the fields listed in the ipwhois.rdap._RDAPNetwork dict.
            :objects: Dictionary of (entity handle: entity dict) which
                consists of the fields listed in the ipwhois.rdap._RDAPEntity
                dict.
            :raw: (Dictionary) - Whois results in json format if the inc_raw
                parameter is True.
        """

        from .rdap import RDAP

        # Create the return dictionary.
        results = {}

        asn_data = None
        response = None
        if not bootstrap:

            # Retrieve the ASN information.
            log.debug('ASN lookup for {0}'.format(self.address_str))
            asn_data, asn_response = self.net.lookup_asn(
                retry_count=retry_count, asn_alts=asn_alts,
                extra_org_map=extra_org_map
            )

            # Add the ASN information to the return dictionary.
            results.update(asn_data)

        # Retrieve the RDAP data and parse.
        rdap = RDAP(self.net)
        log.debug('RDAP lookup for {0}'.format(self.address_str))
        rdap_data = rdap.lookup(
            inc_raw=inc_raw, retry_count=retry_count, asn_data=asn_data,
            depth=depth, excluded_entities=excluded_entities,
            response=response, bootstrap=bootstrap,
            rate_limit_timeout=rate_limit_timeout
        )

        # Add the RDAP information to the return dictionary.
        results.update(rdap_data)

        return results
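
As the docstring notes, lookup_rdap() is the recommended entry point. A minimal sketch tying the parameters and return keys together (the address and printed values are illustrative):

from ipwhois import IPWhois

obj = IPWhois('74.125.225.229', timeout=10)
results = obj.lookup_rdap(depth=1, rate_limit_timeout=60)

print(results['asn_registry'])     # e.g. 'arin'
print(results['network']['cidr'])  # CIDR block(s) for the queried address
for handle in results['entities']:
    print(handle, results['objects'][handle]['roles'])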
958
lib/ipwhois/net.py
Normal file
@@ -0,0 +1,958 @@
# Copyright (c) 2013, 2014, 2015, 2016 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import sys
import socket
import dns.resolver
import json
import logging
from time import sleep

# Import the dnspython3 rdtypes to fix the dynamic import problem when frozen.
import dns.rdtypes.ANY.TXT  # @UnusedImport

from .exceptions import (IPDefinedError, ASNRegistryError, ASNLookupError,
                         BlacklistError, WhoisLookupError, HTTPLookupError,
                         HostLookupError, HTTPRateLimitError)
from .whois import RIR_WHOIS
from .utils import ipv4_is_defined, ipv6_is_defined

if sys.version_info >= (3, 3):  # pragma: no cover
    from ipaddress import (ip_address,
                           IPv4Address,
                           IPv6Address,
                           ip_network,
                           summarize_address_range,
                           collapse_addresses)
else:  # pragma: no cover
    from ipaddr import (IPAddress as ip_address,
                        IPv4Address,
                        IPv6Address,
                        IPNetwork as ip_network,
                        summarize_address_range,
                        collapse_address_list as collapse_addresses)

try:  # pragma: no cover
    from urllib.request import (OpenerDirector,
                                ProxyHandler,
                                build_opener,
                                Request,
                                URLError)
    from urllib.parse import urlencode
except ImportError:  # pragma: no cover
    from urllib2 import (OpenerDirector,
                         ProxyHandler,
                         build_opener,
                         Request,
                         URLError)
    from urllib import urlencode

log = logging.getLogger(__name__)

# POSSIBLY UPDATE TO USE RDAP
ARIN = 'http://whois.arin.net/rest/nets;q={0}?showDetails=true&showARIN=true'

# National Internet Registry
NIR = {
    'jpnic': {
        'url': ('http://whois.nic.ad.jp/cgi-bin/whois_gw?lang=%2Fe&key={0}'
                '&submit=query'),
        'request_type': 'GET',
        'request_headers': {'Accept': 'text/html'}
    },
    'krnic': {
        'url': 'http://whois.kisa.or.kr/eng/whois.jsc',
        'request_type': 'POST',
        'request_headers': {'Accept': 'text/html'},
        'form_data_ip_field': 'query'
    }
}

CYMRU_WHOIS = 'whois.cymru.com'

IPV4_DNS_ZONE = '{0}.origin.asn.cymru.com'

IPV6_DNS_ZONE = '{0}.origin6.asn.cymru.com'

BLACKLIST = [
    'root.rwhois.net'
]

ORG_MAP = {
    'ARIN': 'arin',
    'VR-ARIN': 'arin',
    'RIPE': 'ripencc',
    'APNIC': 'apnic',
    'LACNIC': 'lacnic',
    'AFRINIC': 'afrinic',
    'DNIC': 'arin'
}


class Net:
    """
    The class for performing network queries.

    Args:
        address: An IPv4 or IPv6 address in string format.
        timeout: The default timeout for socket connections in seconds.
        proxy_opener: The urllib.request.OpenerDirector request for proxy
            support or None.
        allow_permutations: Use additional methods if DNS lookups to Cymru
            fail.

    Raises:
        IPDefinedError: The address provided is defined (does not need to be
            resolved).
    """

    def __init__(self, address, timeout=5, proxy_opener=None,
                 allow_permutations=True):

        # IPv4Address or IPv6Address
        if isinstance(address, IPv4Address) or isinstance(
                address, IPv6Address):

            self.address = address

        else:

            # Use ipaddress package exception handling.
            self.address = ip_address(address)

        # Default timeout for socket connections.
        self.timeout = timeout

        # Allow other than DNS lookups for ASNs.
        self.allow_permutations = allow_permutations

        self.dns_resolver = dns.resolver.Resolver()
        self.dns_resolver.timeout = timeout
        self.dns_resolver.lifetime = timeout

        # Proxy opener.
        if isinstance(proxy_opener, OpenerDirector):

            self.opener = proxy_opener

        else:

            handler = ProxyHandler()
            self.opener = build_opener(handler)

        # IP address in string format for use in queries.
        self.address_str = self.address.__str__()

        # Determine the IP version, 4 or 6.
        self.version = self.address.version

        if self.version == 4:

            # Check if no ASN/whois resolution needs to occur.
            is_defined = ipv4_is_defined(self.address_str)

            if is_defined[0]:

                raise IPDefinedError(
                    'IPv4 address {0} is already defined as {1} via '
                    '{2}.'.format(
                        self.address_str, is_defined[1], is_defined[2]
                    )
                )

            # Reverse the IPv4Address for the DNS ASN query.
            split = self.address_str.split('.')
            split.reverse()
            self.reversed = '.'.join(split)

            self.dns_zone = IPV4_DNS_ZONE.format(self.reversed)

        else:

            # Check if no ASN/whois resolution needs to occur.
            is_defined = ipv6_is_defined(self.address_str)

            if is_defined[0]:

                raise IPDefinedError(
                    'IPv6 address {0} is already defined as {1} via '
                    '{2}.'.format(
                        self.address_str, is_defined[1], is_defined[2]
                    )
                )

            # Explode the IPv6Address to fill in any missing 0's.
            exploded = self.address.exploded

            # Cymru seems to timeout when the IPv6 address has trailing '0000'
            # groups. Remove these groups.
            groups = exploded.split(':')
            for index, value in reversed(list(enumerate(groups))):

                if value == '0000':

                    del groups[index]

                else:

                    break

            exploded = ':'.join(groups)

            # Reverse the IPv6Address for the DNS ASN query.
            val = str(exploded).replace(':', '')
            val = val[::-1]
            self.reversed = '.'.join(val)

            self.dns_zone = IPV6_DNS_ZONE.format(self.reversed)
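
    # Illustrative example of the zone construction above: for the IPv4
    # documentation address 192.0.2.1, self.reversed is '1.2.0.192' and
    # self.dns_zone is '1.2.0.192.origin.asn.cymru.com'; get_asn_dns()
    # below queries that zone for a TXT record.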
    def get_asn_dns(self, result=None):
        """
        The function for retrieving ASN information for an IP address from
        Cymru via port 53 (DNS).

        Args:
            result: Optional result object. This bypasses the ASN lookup.

        Returns:
            Dictionary: A dictionary containing the following keys:
                asn (String) - The Autonomous System Number.
                asn_date (String) - The ASN Allocation date.
                asn_registry (String) - The assigned ASN registry.
                asn_cidr (String) - The assigned ASN CIDR.
                asn_country_code (String) - The assigned ASN country code.

        Raises:
            ASNRegistryError: The ASN registry is not known.
            ASNLookupError: The ASN lookup failed.
        """

        try:

            if result is None:

                log.debug('ASN query for {0}'.format(self.dns_zone))
                data = self.dns_resolver.query(self.dns_zone, 'TXT')
                temp = str(data[0]).split('|')

            else:

                temp = result

            # Parse out the ASN information.
            ret = {'asn_registry': temp[3].strip(' \n')}

            if ret['asn_registry'] not in RIR_WHOIS.keys():

                raise ASNRegistryError(
                    'ASN registry {0} is not known.'.format(
                        ret['asn_registry'])
                )

            ret['asn'] = temp[0].strip(' "\n')
            ret['asn_cidr'] = temp[1].strip(' \n')
            ret['asn_country_code'] = temp[2].strip(' \n').upper()
            ret['asn_date'] = temp[4].strip(' "\n')

            return ret

        except ASNRegistryError:

            raise

        except (dns.resolver.NXDOMAIN, dns.resolver.NoNameservers,
                dns.resolver.NoAnswer, dns.exception.Timeout) as e:

            raise ASNLookupError(
                'ASN lookup failed (DNS {0}) for {1}.'.format(
                    e.__class__.__name__, self.address_str)
            )

        except:

            raise ASNLookupError(
                'ASN lookup failed for {0}.'.format(self.address_str)
            )
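
    # Note on the parsing above: the Cymru origin TXT record is a single
    # pipe-delimited string, e.g. (illustrative):
    #     "15169 | 8.8.8.0/24 | US | arin | 2000-03-30"
    # i.e. asn | asn_cidr | asn_country_code | asn_registry | asn_date.
    # The bulk whois service queried below returns extra columns (the queried
    # IP is echoed back), which is why get_asn_whois() reads the registry at
    # index 4 and the date at index 5.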
    def get_asn_whois(self, retry_count=3, result=None):
        """
        The function for retrieving ASN information for an IP address from
        Cymru via port 43/tcp (WHOIS).

        Args:
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            result: Optional result object. This bypasses the ASN lookup.

        Returns:
            Dictionary: A dictionary containing the following keys:
                asn (String) - The Autonomous System Number.
                asn_date (String) - The ASN Allocation date.
                asn_registry (String) - The assigned ASN registry.
                asn_cidr (String) - The assigned ASN CIDR.
                asn_country_code (String) - The assigned ASN country code.

        Raises:
            ASNRegistryError: The ASN registry is not known.
            ASNLookupError: The ASN lookup failed.
        """

        try:

            if result is None:

                # Create the connection for the Cymru whois query.
                conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                conn.settimeout(self.timeout)
                log.debug('ASN query for {0}'.format(self.address_str))
                conn.connect((CYMRU_WHOIS, 43))

                # Query the Cymru whois server, and store the results.
                conn.send((
                    ' -r -a -c -p -f -o {0}{1}'.format(
                        self.address_str, '\r\n')
                ).encode())

                data = ''
                while True:

                    d = conn.recv(4096).decode()
                    data += d

                    if not d:

                        break

                conn.close()

            else:

                data = result

            # Parse out the ASN information.
            temp = str(data).split('|')

            ret = {'asn_registry': temp[4].strip(' \n')}

            if ret['asn_registry'] not in RIR_WHOIS.keys():

                raise ASNRegistryError(
                    'ASN registry {0} is not known.'.format(
                        ret['asn_registry'])
                )

            ret['asn'] = temp[0].strip(' \n')
            ret['asn_cidr'] = temp[2].strip(' \n')
            ret['asn_country_code'] = temp[3].strip(' \n').upper()
            ret['asn_date'] = temp[5].strip(' \n')

            return ret

        except (socket.timeout, socket.error) as e:  # pragma: no cover

            log.debug('ASN query socket error: {0}'.format(e))
            if retry_count > 0:

                log.debug('ASN query retrying (count: {0})'.format(
                    str(retry_count)))
                return self.get_asn_whois(retry_count - 1)

            else:

                raise ASNLookupError(
                    'ASN lookup failed for {0}.'.format(self.address_str)
                )

        except ASNRegistryError:

            raise

        except:

            raise ASNLookupError(
                'ASN lookup failed for {0}.'.format(self.address_str)
            )
    def get_asn_http(self, retry_count=3, result=None, extra_org_map=None):
        """
        The function for retrieving ASN information for an IP address from
        ARIN via port 80 (HTTP). Currently limited to fetching asn_registry
        through an ARIN whois (REST) lookup. The other values are returned as
        None to keep a consistent dict output. This should be used as a last
        chance fallback call behind ASN DNS & ASN Whois lookups.

        Args:
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            result: Optional result object. This bypasses the ASN lookup.
            extra_org_map: Dictionary mapping org handles to RIRs. This is for
                limited cases where ARIN REST (ASN fallback HTTP lookup) does
                not show an RIR as the org handle, e.g., DNIC (which is now in
                the built-in ORG_MAP): {'DNIC': 'arin'}. Valid RIR values are
                (note the case sensitivity; these are meant to match the REST
                result): 'ARIN', 'RIPE', 'apnic', 'lacnic', 'afrinic'

        Returns:
            Dictionary: A dictionary containing the following keys:
                asn (String) - None, can't retrieve with this method.
                asn_date (String) - None, can't retrieve with this method.
                asn_registry (String) - The assigned ASN registry.
                asn_cidr (String) - None, can't retrieve with this method.
                asn_country_code (String) - None, can't retrieve with this
                    method.

        Raises:
            ASNRegistryError: The ASN registry is not known.
            ASNLookupError: The ASN lookup failed.
        """

        # Set the org_map. Map the orgRef handle to an RIR.
        org_map = ORG_MAP.copy()
        try:

            org_map.update(extra_org_map)

        except (TypeError, ValueError, IndexError, KeyError):

            pass

        try:

            if result is None:

                # Let's attempt to get the ASN registry information from
                # ARIN.
                log.debug('ASN query for {0}'.format(self.address_str))
                response = self.get_http_json(
                    url=str(ARIN).format(self.address_str),
                    retry_count=retry_count,
                    headers={'Accept': 'application/json'}
                )

            else:

                response = result

            asn_data = {
                'asn_registry': None,
                'asn': None,
                'asn_cidr': None,
                'asn_country_code': None,
                'asn_date': None
            }

            try:

                net_list = response['nets']['net']

                if not isinstance(net_list, list):
                    net_list = [net_list]

            except (KeyError, TypeError):

                log.debug('No networks found')
                net_list = []

            for n in net_list:

                try:

                    asn_data['asn_registry'] = (
                        org_map[n['orgRef']['@handle'].upper()]
                    )

                except KeyError as e:

                    log.debug('Could not parse ASN registry via HTTP: '
                              '{0}'.format(str(e)))
                    raise ASNRegistryError('ASN registry lookup failed.')

                break

            return asn_data

        except (socket.timeout, socket.error) as e:  # pragma: no cover

            log.debug('ASN query socket error: {0}'.format(e))
            if retry_count > 0:

                log.debug('ASN query retrying (count: {0})'.format(
                    str(retry_count)))
                return self.get_asn_http(retry_count=retry_count - 1)

            else:

                raise ASNLookupError(
                    'ASN lookup failed for {0}.'.format(self.address_str)
                )

        except ASNRegistryError:

            raise

        except:

            raise ASNLookupError(
                'ASN lookup failed for {0}.'.format(self.address_str)
            )
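
    # The ARIN REST response parsed above nests networks under
    # response['nets']['net'], which may be a single dict or a list; each
    # net's organization reference exposes its handle at
    # n['orgRef']['@handle'], and ORG_MAP (plus any extra_org_map entries)
    # translates that handle to an RIR name.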
def get_whois(self, asn_registry='arin', retry_count=3, server=None,
|
||||
port=43, extra_blacklist=None):
|
||||
"""
|
||||
The function for retrieving whois or rwhois information for an IP
|
||||
address via any port. Defaults to port 43/tcp (WHOIS).
|
||||
|
||||
Args:
|
||||
asn_registry: The NIC to run the query against.
|
||||
retry_count: The number of times to retry in case socket errors,
|
||||
timeouts, connection resets, etc. are encountered.
|
||||
server: An optional server to connect to. If provided, asn_registry
|
||||
will be ignored.
|
||||
port: The network port to connect on.
|
||||
extra_blacklist: A list of blacklisted whois servers in addition to
|
||||
the global BLACKLIST.
|
||||
|
||||
Returns:
|
||||
String: The raw whois data.
|
||||
|
||||
Raises:
|
||||
BlacklistError: Raised if the whois server provided is in the
|
||||
global BLACKLIST or extra_blacklist.
|
||||
WhoisLookupError: The whois lookup failed.
|
||||
"""
|
||||
|
||||
try:
|
||||
|
||||
extra_bl = extra_blacklist if extra_blacklist else []
|
||||
|
||||
if any(server in srv for srv in (BLACKLIST, extra_bl)):
|
||||
raise BlacklistError(
|
||||
'The server {0} is blacklisted.'.format(server)
|
||||
)
|
||||
|
||||
if server is None:
|
||||
server = RIR_WHOIS[asn_registry]['server']
|
||||
|
||||
# Create the connection for the whois query.
|
||||
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
conn.settimeout(self.timeout)
|
||||
log.debug('WHOIS query for {0} at {1}:{2}'.format(
|
||||
self.address_str, server, port))
|
||||
conn.connect((server, port))
|
||||
|
||||
# Prep the query.
|
||||
query = self.address_str + '\r\n'
|
||||
if asn_registry == 'arin':
|
||||
|
||||
query = 'n + {0}'.format(query)
|
||||
|
||||
# Query the whois server, and store the results.
|
||||
conn.send(query.encode())
|
||||
|
||||
response = ''
|
||||
while True:
|
||||
|
||||
d = conn.recv(4096).decode('ascii', 'ignore')
|
||||
|
||||
response += d
|
||||
|
||||
if not d:
|
||||
|
||||
break
|
||||
|
||||
conn.close()
|
||||
|
||||
if 'Query rate limit exceeded' in response: # pragma: no cover
|
||||
|
||||
log.debug('WHOIS query rate limit exceeded. Waiting...')
|
||||
sleep(1)
|
||||
return self.get_whois(
|
||||
asn_registry=asn_registry, retry_count=retry_count-1,
|
||||
server=server, port=port, extra_blacklist=extra_blacklist
|
||||
)
|
||||
|
||||
elif ('error 501' in response or 'error 230' in response
|
||||
): # pragma: no cover
|
||||
|
||||
log.debug('WHOIS query error: {0}'.format(response))
|
||||
raise ValueError
|
||||
|
||||
return str(response)
|
||||
|
||||
except (socket.timeout, socket.error) as e:
|
||||
|
||||
log.debug('WHOIS query socket error: {0}'.format(e))
|
||||
if retry_count > 0:
|
||||
|
||||
log.debug('WHOIS query retrying (count: {0})'.format(
|
||||
str(retry_count)))
|
||||
return self.get_whois(
|
||||
asn_registry=asn_registry, retry_count=retry_count-1,
|
||||
server=server, port=port, extra_blacklist=extra_blacklist
|
||||
)
|
||||
|
||||
else:
|
||||
|
||||
raise WhoisLookupError(
|
||||
'WHOIS lookup failed for {0}.'.format(self.address_str)
|
||||
)
|
||||
|
||||
except BlacklistError:
|
||||
|
||||
raise
|
||||
|
||||
except: # pragma: no cover
|
||||
|
||||
raise WhoisLookupError(
|
||||
'WHOIS lookup failed for {0}.'.format(self.address_str)
|
||||
)
|
||||
|
||||
    def get_http_json(self, url=None, retry_count=3, rate_limit_timeout=120,
                      headers=None):
        """
        The function for retrieving a json result via HTTP.

        Args:
            url: The URL to retrieve.
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            rate_limit_timeout: The number of seconds to wait before retrying
                when a rate limit notice is returned via rdap+json.
            headers: The HTTP headers dictionary. The Accept header defaults
                to 'application/rdap+json'.

        Returns:
            Dictionary: The data in json format.

        Raises:
            HTTPLookupError: The HTTP lookup failed.
            HTTPRateLimitError: The HTTP request was rate limited and
                retries were exhausted.
        """

        if headers is None:
            headers = {'Accept': 'application/rdap+json'}

        try:

            # Create the connection for the HTTP query.
            log.debug('HTTP query for {0} at {1}'.format(
                self.address_str, url))
            conn = Request(url, headers=headers)
            data = self.opener.open(conn, timeout=self.timeout)
            try:
                d = json.loads(data.readall().decode('utf-8', 'ignore'))
            except AttributeError:  # pragma: no cover
                d = json.loads(data.read().decode('utf-8', 'ignore'))

            try:
                # Tests written but commented out. I do not want to send a
                # flood of requests on every test.
                for tmp in d['notices']:  # pragma: no cover
                    if tmp['title'] == 'Rate Limit Notice':
                        log.debug('RDAP query rate limit exceeded.')

                        if retry_count > 0:
                            log.debug('Waiting {0} seconds...'.format(
                                str(rate_limit_timeout)))
                            sleep(rate_limit_timeout)
                            return self.get_http_json(
                                url=url, retry_count=retry_count-1,
                                rate_limit_timeout=rate_limit_timeout,
                                headers=headers
                            )
                        else:
                            raise HTTPRateLimitError(
                                'HTTP lookup failed for {0}. Rate limit '
                                'exceeded, wait and try again (possibly a '
                                'temporary block).'.format(url))

            except (KeyError, IndexError):  # pragma: no cover
                pass

            return d

        except (URLError, socket.timeout, socket.error) as e:

            # Check needed for Python 2.6, also why URLError is caught.
            try:  # pragma: no cover
                if not isinstance(e.reason, (socket.timeout, socket.error)):
                    raise HTTPLookupError('HTTP lookup failed for {0}.'
                                          ''.format(url))
            except AttributeError:  # pragma: no cover
                pass

            log.debug('HTTP query socket error: {0}'.format(e))
            if retry_count > 0:
                log.debug('HTTP query retrying (count: {0})'.format(
                    str(retry_count)))
                return self.get_http_json(
                    url=url, retry_count=retry_count-1,
                    rate_limit_timeout=rate_limit_timeout, headers=headers
                )
            else:
                raise HTTPLookupError('HTTP lookup failed for {0}.'.format(
                    url))

        except (HTTPLookupError, HTTPRateLimitError) as e:  # pragma: no cover

            raise e

        except:  # pragma: no cover

            raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
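A minimal usage sketch for get_http_json (illustrative only, not part of this diff; the Net constructor call and the ARIN RDAP URL are assumptions based on this file):

# Hypothetical usage; assumes Net accepts an IP address string.
from ipwhois.net import Net

net = Net('74.125.225.229')
rdap = net.get_http_json(
    url='http://rdap.arin.net/registry/ip/74.125.225.229',
    retry_count=3, rate_limit_timeout=120
)
print(rdap['handle'])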
    def get_host(self, retry_count=3):
        """
        The function for retrieving host information for an IP address.

        Args:
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.

        Returns:
            Tuple: hostname, aliaslist, ipaddrlist

        Raises:
            HostLookupError: The host lookup failed.
        """

        try:

            default_timeout_set = False
            if not socket.getdefaulttimeout():
                socket.setdefaulttimeout(self.timeout)
                default_timeout_set = True

            log.debug('Host query for {0}'.format(self.address_str))
            ret = socket.gethostbyaddr(self.address_str)

            if default_timeout_set:  # pragma: no cover
                socket.setdefaulttimeout(None)

            return ret

        except (socket.timeout, socket.error) as e:

            log.debug('Host query socket error: {0}'.format(e))
            if retry_count > 0:
                log.debug('Host query retrying (count: {0})'.format(
                    str(retry_count)))
                return self.get_host(retry_count - 1)
            else:
                raise HostLookupError(
                    'Host lookup failed for {0}.'.format(self.address_str)
                )

        except:  # pragma: no cover

            raise HostLookupError(
                'Host lookup failed for {0}.'.format(self.address_str)
            )
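get_host wraps socket.gethostbyaddr() with timeout and retry handling; a quick sketch, reusing the hypothetical net instance from the previous example:

hostname, aliases, ip_list = net.get_host(retry_count=2)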
    def lookup_asn(self, retry_count=3, asn_alts=None, extra_org_map=None):
        """
        The wrapper function for retrieving and parsing ASN information for
        an IP address.

        Args:
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            asn_alts: Array of additional lookup types to attempt if the
                ASN DNS lookup fails. Allow permutations must be enabled.
                Defaults to all: ['whois', 'http'].
            extra_org_map: Dictionary mapping org handles to RIRs. This is
                for limited cases where ARIN REST (ASN fallback HTTP lookup)
                does not show an RIR as the org handle, e.g., DNIC (which is
                now in the built-in ORG_MAP): {'DNIC': 'arin'}. Valid RIR
                values are (note the case sensitivity - these are meant to
                match the REST result): 'ARIN', 'RIPE', 'apnic', 'lacnic',
                'afrinic'

        Returns:
            Tuple:

            :Dictionary: Result from get_asn_dns(), get_asn_whois(), or
                get_asn_http().
            :Dictionary: The response returned by the ASN lookup, if any.

        Raises:
            ASNRegistryError: ASN registry does not match.
            HTTPLookupError: The HTTP lookup failed.
        """

        lookups = asn_alts if asn_alts is not None else ['whois', 'http']

        # Initialize the response.
        response = None

        # Attempt to resolve ASN info via Cymru. DNS is faster, try that
        # first.
        try:
            self.dns_resolver.lifetime = self.dns_resolver.timeout * (
                retry_count and retry_count or 1)
            asn_data = self.get_asn_dns()

        except (ASNLookupError, ASNRegistryError) as e:

            if not self.allow_permutations:
                raise ASNRegistryError('ASN registry lookup failed. '
                                       'Permutations not allowed.')

            try:
                if 'whois' in lookups:
                    log.debug('ASN DNS lookup failed, trying ASN WHOIS: '
                              '{0}'.format(e))
                    asn_data = self.get_asn_whois(retry_count)
                else:
                    raise ASNLookupError

            except (ASNLookupError, ASNRegistryError):  # pragma: no cover

                if 'http' in lookups:

                    # Attempt to get the ASN registry information from ARIN.
                    log.debug('ASN WHOIS lookup failed, trying ASN via HTTP')
                    try:
                        asn_data = self.get_asn_http(
                            retry_count=retry_count,
                            extra_org_map=extra_org_map
                        )
                    except ASNRegistryError:
                        raise ASNRegistryError('ASN registry lookup failed.')
                    except ASNLookupError:
                        raise HTTPLookupError('ASN HTTP lookup failed.')

                else:
                    raise ASNRegistryError('ASN registry lookup failed.')

        return asn_data, response
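lookup_asn tries the Cymru DNS lookup first and only walks the asn_alts fallbacks when permutations are allowed; a hedged sketch of a call (the extra_org_map value mirrors the docstring example):

asn_data, asn_response = net.lookup_asn(
    retry_count=3,
    asn_alts=['whois', 'http'],        # fallback order after DNS
    extra_org_map={'DNIC': 'arin'}     # optional ARIN REST org mapping
)
# asn_data['asn_registry'] selects the RIR used by later whois/RDAP queries.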
    def get_http_raw(self, url=None, retry_count=3, headers=None,
                     request_type='GET', form_data=None):
        """
        The function for retrieving a raw HTML result via HTTP.

        Args:
            url: The URL to retrieve.
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            headers: The HTTP headers dictionary. The Accept header defaults
                to 'text/html'.
            request_type: 'GET' or 'POST'
            form_data: Dictionary of form POST data

        Returns:
            String: The raw data.

        Raises:
            HTTPLookupError: The HTTP lookup failed.
        """

        if headers is None:
            headers = {'Accept': 'text/html'}

        if form_data:
            form_data = urlencode(form_data)
            try:
                form_data = bytes(form_data, encoding='ascii')
            except TypeError:  # pragma: no cover
                pass

        try:

            # Create the connection for the HTTP query.
            log.debug('HTTP query for {0} at {1}'.format(
                self.address_str, url))
            try:
                conn = Request(url=url, data=form_data, headers=headers,
                               method=request_type)
            except TypeError:  # pragma: no cover
                conn = Request(url=url, data=form_data, headers=headers)
            data = self.opener.open(conn, timeout=self.timeout)

            try:
                d = data.readall().decode('ascii', 'ignore')
            except AttributeError:  # pragma: no cover
                d = data.read().decode('ascii', 'ignore')

            return str(d)

        except (URLError, socket.timeout, socket.error) as e:

            # Check needed for Python 2.6, also why URLError is caught.
            try:  # pragma: no cover
                if not isinstance(e.reason, (socket.timeout, socket.error)):
                    raise HTTPLookupError('HTTP lookup failed for {0}.'
                                          ''.format(url))
            except AttributeError:  # pragma: no cover
                pass

            log.debug('HTTP query socket error: {0}'.format(e))
            if retry_count > 0:
                log.debug('HTTP query retrying (count: {0})'.format(
                    str(retry_count)))
                return self.get_http_raw(
                    url=url, retry_count=retry_count - 1, headers=headers,
                    request_type=request_type, form_data=form_data
                )
            else:
                raise HTTPLookupError('HTTP lookup failed for {0}.'.format(
                    url))

        except HTTPLookupError as e:  # pragma: no cover

            raise e

        except Exception:  # pragma: no cover

            raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
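get_http_raw is the generic GET/POST helper; a sketch of a form POST (the URL and form field below are placeholders, not endpoints this library defines):

html = net.get_http_raw(
    url='http://lookup.example.com/query',   # placeholder endpoint
    request_type='POST',
    form_data={'queryinput': net.address_str}
)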
832
lib/ipwhois/rdap.py
Normal file
@@ -0,0 +1,832 @@
# Copyright (c) 2013, 2014, 2015, 2016 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

from . import (Net, NetError, InvalidEntityContactObject, InvalidNetworkObject,
               InvalidEntityObject, HTTPLookupError)
from .utils import ipv4_lstrip_zeros, calculate_cidr, unique_everseen
from .net import ip_address
import logging
import json

log = logging.getLogger(__name__)

BOOTSTRAP_URL = 'http://rdap.arin.net/bootstrap'

RIR_RDAP = {
    'arin': {
        'ip_url': 'http://rdap.arin.net/registry/ip/{0}',
        'entity_url': 'http://rdap.arin.net/registry/entity/{0}'
    },
    'ripencc': {
        'ip_url': 'http://rdap.db.ripe.net/ip/{0}',
        'entity_url': 'http://rdap.db.ripe.net/entity/{0}'
    },
    'apnic': {
        'ip_url': 'http://rdap.apnic.net/ip/{0}',
        'entity_url': 'http://rdap.apnic.net/entity/{0}'
    },
    'lacnic': {
        'ip_url': 'http://rdap.lacnic.net/rdap/ip/{0}',
        'entity_url': 'http://rdap.lacnic.net/rdap/entity/{0}'
    },
    'afrinic': {
        'ip_url': 'http://rdap.afrinic.net/rdap/ip/{0}',
        'entity_url': 'http://rdap.afrinic.net/rdap/entity/{0}'
    }
}
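The ip_url/entity_url values are plain format strings; illustrative only:

url = RIR_RDAP['lacnic']['ip_url'].format('200.57.141.161')
# -> 'http://rdap.lacnic.net/rdap/ip/200.57.141.161'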
class _RDAPContact:
    """
    The class for parsing RDAP entity contact information objects:
    https://tools.ietf.org/html/rfc7483#section-5.1
    https://tools.ietf.org/html/rfc7095

    Args:
        vcard: The vcard list from an RDAP IP address query.

    Raises:
        InvalidEntityContactObject: vcard is not an RDAP entity contact
            information object.
    """

    def __init__(self, vcard):

        if not isinstance(vcard, list):
            raise InvalidEntityContactObject('JSON result must be a list.')

        self.vcard = vcard
        self.vars = {
            'name': None,
            'kind': None,
            'address': None,
            'phone': None,
            'email': None,
            'role': None,
            'title': None
        }

    def _parse_name(self, val):
        """
        The function for parsing the vcard name.

        Args:
            val: The value to parse.
        """

        self.vars['name'] = val[3].strip()

    def _parse_kind(self, val):
        """
        The function for parsing the vcard kind.

        Args:
            val: The value to parse.
        """

        self.vars['kind'] = val[3].strip()

    def _parse_address(self, val):
        """
        The function for parsing the vcard address.

        Args:
            val: The value to parse.
        """

        ret = {
            'type': None,
            'value': None
        }

        try:
            ret['type'] = val[1]['type']
        except (KeyError, ValueError, TypeError):
            pass

        try:
            ret['value'] = val[1]['label']
        except (KeyError, ValueError, TypeError):
            ret['value'] = '\n'.join(val[3]).strip()

        try:
            self.vars['address'].append(ret)
        except AttributeError:
            self.vars['address'] = []
            self.vars['address'].append(ret)

    def _parse_phone(self, val):
        """
        The function for parsing the vcard phone numbers.

        Args:
            val: The value to parse.
        """

        ret = {
            'type': None,
            'value': None
        }

        try:
            ret['type'] = val[1]['type']
        except (IndexError, KeyError, ValueError, TypeError):
            pass

        ret['value'] = val[3].strip()

        try:
            self.vars['phone'].append(ret)
        except AttributeError:
            self.vars['phone'] = []
            self.vars['phone'].append(ret)

    def _parse_email(self, val):
        """
        The function for parsing the vcard email addresses.

        Args:
            val: The value to parse.
        """

        ret = {
            'type': None,
            'value': None
        }

        try:
            ret['type'] = val[1]['type']
        except (KeyError, ValueError, TypeError):
            pass

        ret['value'] = val[3].strip()

        try:
            self.vars['email'].append(ret)
        except AttributeError:
            self.vars['email'] = []
            self.vars['email'].append(ret)

    def _parse_role(self, val):
        """
        The function for parsing the vcard role.

        Args:
            val: The value to parse.
        """

        self.vars['role'] = val[3].strip()

    def _parse_title(self, val):
        """
        The function for parsing the vcard title.

        Args:
            val: The value to parse.
        """

        self.vars['title'] = val[3].strip()

    def parse(self):
        """
        The function for parsing the vcard to the vars dictionary.
        """

        keys = {
            'fn': self._parse_name,
            'kind': self._parse_kind,
            'adr': self._parse_address,
            'tel': self._parse_phone,
            'email': self._parse_email,
            'role': self._parse_role,
            'title': self._parse_title
        }

        for val in self.vcard:

            try:
                parser = keys.get(val[0])
                parser(val)
            except (KeyError, ValueError, TypeError):
                pass
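A sketch of _RDAPContact against a minimal jCard (RFC 7095) properties list; illustrative only, since the class is private:

vcard = [
    ['fn', {}, 'text', 'Joe Example'],
    ['email', {'type': 'work'}, 'text', 'joe@example.com']
]
contact = _RDAPContact(vcard)
contact.parse()
# contact.vars['name']  -> 'Joe Example'
# contact.vars['email'] -> [{'type': 'work', 'value': 'joe@example.com'}]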
class _RDAPCommon:
    """
    The common class for parsing RDAP objects:
    https://tools.ietf.org/html/rfc7483#section-5

    Args:
        json_result: The JSON response from an RDAP query.

    Raises:
        ValueError: json_result is not a known RDAP object.
    """

    def __init__(self, json_result):

        if not isinstance(json_result, dict):
            raise ValueError

        self.json = json_result
        self.vars = {
            'handle': None,
            'status': None,
            'remarks': None,
            'notices': None,
            'links': None,
            'events': None,
            'raw': None
        }

    def summarize_links(self, links_json):
        """
        The function for summarizing RDAP links in to a unique list.
        https://tools.ietf.org/html/rfc7483#section-4.2

        Args:
            links_json: A json dictionary of links from RDAP results.

        Returns:
            List: A unique list of found RDAP link dictionaries.
        """

        ret = []
        for link_dict in links_json:
            ret.append(link_dict['href'])

        ret = list(unique_everseen(ret))

        return ret

    def summarize_notices(self, notices_json):
        """
        The function for summarizing RDAP notices in to a unique list.
        https://tools.ietf.org/html/rfc7483#section-4.3

        Args:
            notices_json: A json dictionary of notices from RDAP results.

        Returns:
            List: A unique list of found RDAP notices dictionaries.
        """

        ret = []
        for notices_dict in notices_json:

            tmp = {
                'title': None,
                'description': None,
                'links': None
            }

            try:
                tmp['title'] = notices_dict['title']
            except (KeyError, ValueError, TypeError):
                pass

            try:
                tmp['description'] = '\n'.join(notices_dict['description'])
            except (KeyError, ValueError, TypeError):
                pass

            try:
                tmp['links'] = self.summarize_links(notices_dict['links'])
            except (KeyError, ValueError, TypeError):
                pass

            if all(tmp.values()):
                ret.append(tmp)

        return ret

    def summarize_events(self, events_json):
        """
        The function for summarizing RDAP events in to a unique list.
        https://tools.ietf.org/html/rfc7483#section-4.5

        Args:
            events_json: A json dictionary of events from RDAP results.

        Returns:
            List: A unique list of found RDAP events dictionaries.
        """

        ret = []
        for event in events_json:

            event_dict = {
                'action': event['eventAction'],
                'timestamp': event['eventDate'],
                'actor': None
            }

            try:
                event_dict['actor'] = event['eventActor']
            except (KeyError, ValueError, TypeError):
                pass

            ret.append(event_dict)

        return ret

    def _parse(self):
        """
        The function for parsing the JSON response to the vars dictionary.
        """

        try:
            self.vars['status'] = self.json['status']
        except (KeyError, ValueError, TypeError):
            pass

        for v in ['remarks', 'notices']:
            try:
                self.vars[v] = self.summarize_notices(self.json[v])
            except (KeyError, ValueError, TypeError):
                pass

        try:
            self.vars['links'] = self.summarize_links(self.json['links'])
        except (KeyError, ValueError, TypeError):
            pass

        try:
            self.vars['events'] = self.summarize_events(self.json['events'])
        except (KeyError, ValueError, TypeError):
            pass
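summarize_events reduces each RDAP event to action/timestamp/actor; an illustrative input/output pair:

events = [{'eventAction': 'last changed',
           'eventDate': '2016-01-01T00:00:00Z'}]
# summarize_events(events) ->
# [{'action': 'last changed', 'timestamp': '2016-01-01T00:00:00Z',
#   'actor': None}]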
class _RDAPNetwork(_RDAPCommon):
    """
    The class for parsing RDAP network objects:
    https://tools.ietf.org/html/rfc7483#section-5.4

    Args:
        json_result: The JSON response from an RDAP IP address query.

    Raises:
        InvalidNetworkObject: json_result is not an RDAP network object.
    """

    def __init__(self, json_result):

        try:
            _RDAPCommon.__init__(self, json_result)
        except ValueError:
            raise InvalidNetworkObject('JSON result must be a dict.')

        self.vars.update({
            'start_address': None,
            'end_address': None,
            'cidr': None,
            'ip_version': None,
            'type': None,
            'name': None,
            'country': None,
            'parent_handle': None
        })

    def parse(self):
        """
        The function for parsing the JSON response to the vars dictionary.
        """

        try:
            self.vars['handle'] = self.json['handle'].strip()
        except (KeyError, ValueError):
            log.debug('Handle missing, json_output: {0}'.format(json.dumps(
                self.json)))
            raise InvalidNetworkObject('Handle is missing for RDAP network '
                                       'object')

        try:
            self.vars['ip_version'] = self.json['ipVersion'].strip()

            # RDAP IPv4 addresses are padded to 3 digits per octet, remove
            # the leading 0's.
            if self.vars['ip_version'] == 'v4':
                self.vars['start_address'] = ip_address(
                    ipv4_lstrip_zeros(self.json['startAddress'])
                ).__str__()
                self.vars['end_address'] = ip_address(
                    ipv4_lstrip_zeros(self.json['endAddress'])
                ).__str__()

            # No bugs found for IPv6 yet, proceed as normal.
            else:
                self.vars['start_address'] = self.json['startAddress'].strip()
                self.vars['end_address'] = self.json['endAddress'].strip()

        except (KeyError, ValueError, TypeError):
            log.debug('IP address data incomplete. Data parsed prior to '
                      'exception: {0}'.format(json.dumps(self.vars)))
            raise InvalidNetworkObject('IP address data is missing for RDAP '
                                       'network object.')

        try:
            self.vars['cidr'] = ', '.join(calculate_cidr(
                self.vars['start_address'], self.vars['end_address']
            ))
        except (KeyError, ValueError, TypeError, AttributeError) as \
                e:  # pragma: no cover
            log.debug('CIDR calculation failed: {0}'.format(e))
            pass

        for v in ['name', 'type', 'country']:
            try:
                self.vars[v] = self.json[v].strip()
            except (KeyError, ValueError):
                pass

        try:
            self.vars['parent_handle'] = self.json['parentHandle'].strip()
        except (KeyError, ValueError):
            pass

        self._parse()
class _RDAPEntity(_RDAPCommon):
    """
    The class for parsing RDAP entity objects:
    https://tools.ietf.org/html/rfc7483#section-5.1

    Args:
        json_result: The JSON response from an RDAP query.

    Raises:
        InvalidEntityObject: json_result is not an RDAP entity object.
    """

    def __init__(self, json_result):

        try:
            _RDAPCommon.__init__(self, json_result)
        except ValueError:
            raise InvalidEntityObject('JSON result must be a dict.')

        self.vars.update({
            'roles': None,
            'contact': None,
            'events_actor': None,
            'entities': []
        })

    def parse(self):
        """
        The function for parsing the JSON response to the vars dictionary.
        """

        try:
            self.vars['handle'] = self.json['handle'].strip()
        except (KeyError, ValueError, TypeError):
            raise InvalidEntityObject('Handle is missing for RDAP entity')

        for v in ['roles', 'country']:
            try:
                self.vars[v] = self.json[v]
            except (KeyError, ValueError):
                pass

        try:
            vcard = self.json['vcardArray'][1]
            c = _RDAPContact(vcard)
            c.parse()
            self.vars['contact'] = c.vars
        except (KeyError, ValueError, TypeError):
            pass

        try:
            self.vars['events_actor'] = self.summarize_events(
                self.json['asEventActor'])
        except (KeyError, ValueError, TypeError):
            pass

        self.vars['entities'] = []
        try:
            for ent in self.json['entities']:
                if ent['handle'] not in self.vars['entities']:
                    self.vars['entities'].append(ent['handle'])
        except (KeyError, ValueError, TypeError):
            pass

        if not self.vars['entities']:
            self.vars['entities'] = None

        self._parse()
class RDAP:
    """
    The class for parsing IP address whois information via RDAP:
    https://tools.ietf.org/html/rfc7483
    https://www.arin.net/resources/rdap.html

    Args:
        net: A ipwhois.net.Net object.

    Raises:
        NetError: The parameter provided is not an instance of
            ipwhois.net.Net
        IPDefinedError: The address provided is defined (does not need to be
            resolved).
    """

    def __init__(self, net):

        if isinstance(net, Net):
            self._net = net
        else:
            raise NetError('The provided net parameter is not an instance of '
                           'ipwhois.net.Net')

    def lookup(self, inc_raw=False, retry_count=3, asn_data=None, depth=0,
               excluded_entities=None, response=None, bootstrap=False,
               rate_limit_timeout=120):
        """
        The function for retrieving and parsing information for an IP
        address via RDAP (HTTP).

        Args:
            inc_raw: Boolean for whether to include the raw results in the
                returned dictionary.
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            asn_data: Result dictionary from ipwhois.net.Net.lookup_asn().
                Optional if the bootstrap parameter is True.
            depth: How many levels deep to run queries when additional
                referenced objects are found.
            excluded_entities: A list of entity handles to skip lookups for.
            response: Optional response object, this bypasses the RDAP
                lookup.
            bootstrap: If True, performs lookups via ARIN bootstrap rather
                than lookups based on ASN data.
            rate_limit_timeout: The number of seconds to wait before retrying
                when a rate limit notice is returned via rdap+json.

        Returns:
            Dictionary:

            :query: The IP address (String)
            :network: Dictionary of values returned by _RDAPNetwork. The raw
                result is included for each entity if the inc_raw parameter
                is True.
            :entities: List of entity keys referenced by the top level IP
                address query.
            :objects: Dictionary of objects with the handles as keys, and
                the dictionary returned by _RDAPEntity, etc as the values.
                The raw result is included for each object if the inc_raw
                parameter is True.
        """

        if not excluded_entities:
            excluded_entities = []

        # Create the return dictionary.
        results = {
            'query': self._net.address_str,
            'network': None,
            'entities': None,
            'objects': None,
            'raw': None
        }

        if bootstrap:
            ip_url = '{0}/ip/{1}'.format(BOOTSTRAP_URL, self._net.address_str)
        else:
            ip_url = str(RIR_RDAP[asn_data['asn_registry']]['ip_url']).format(
                self._net.address_str)

        # Only fetch the response if we haven't already.
        if response is None:

            log.debug('Response not given, perform RDAP lookup for '
                      '{0}'.format(ip_url))

            # Retrieve the RDAP data.
            response = self._net.get_http_json(
                url=ip_url, retry_count=retry_count,
                rate_limit_timeout=rate_limit_timeout
            )

        if inc_raw:
            results['raw'] = response

        log.debug('Parsing RDAP network object')
        result_net = _RDAPNetwork(response)
        result_net.parse()
        results['network'] = result_net.vars
        results['entities'] = []
        results['objects'] = {}

        # Iterate through and parse the root level entities.
        log.debug('Parsing RDAP root level entities')
        try:

            for ent in response['entities']:

                if ent['handle'] not in (results['entities'] +
                                         excluded_entities):

                    result_ent = _RDAPEntity(ent)
                    result_ent.parse()
                    results['objects'][ent['handle']] = result_ent.vars
                    results['entities'].append(ent['handle'])

        except KeyError:
            pass

        # Iterate through to the defined depth, retrieving and parsing all
        # unique entities.
        temp_objects = results['objects']

        if depth > 0 and len(temp_objects) > 0:
            log.debug('Parsing RDAP sub-entities to depth: {0}'.format(str(
                depth)))

        while depth > 0 and len(temp_objects) > 0:

            new_objects = {}
            for obj in temp_objects.values():

                try:

                    for ent in obj['entities']:

                        if ent not in (list(results['objects'].keys()) +
                                       list(new_objects.keys()) +
                                       excluded_entities):

                            if bootstrap:
                                entity_url = '{0}/entity/{1}'.format(
                                    BOOTSTRAP_URL, ent)
                            else:
                                tmp_reg = asn_data['asn_registry']
                                entity_url = RIR_RDAP[tmp_reg]['entity_url']
                                entity_url = str(entity_url).format(ent)

                            try:

                                # RDAP entity query
                                response = self._net.get_http_json(
                                    url=entity_url, retry_count=retry_count,
                                    rate_limit_timeout=rate_limit_timeout
                                )

                                # Parse the entity
                                result_ent = _RDAPEntity(response)
                                result_ent.parse()
                                new_objects[ent] = result_ent.vars

                                if inc_raw:
                                    new_objects[ent]['raw'] = response

                            except (HTTPLookupError, InvalidEntityObject):
                                pass

                except TypeError:
                    pass

            # Update the result objects, and set the new temp object list to
            # iterate for the next depth of entities.
            results['objects'].update(new_objects)
            temp_objects = new_objects
            depth -= 1

        return results
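An end-to-end sketch tying Net and RDAP together (illustrative; bootstrap=True avoids needing asn_data, per the lookup() docstring):

from ipwhois.net import Net
from ipwhois.rdap import RDAP

net = Net('74.125.225.229')
results = RDAP(net).lookup(depth=1, bootstrap=True)
print(results['network']['cidr'])
print(results['entities'])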
553
lib/ipwhois/utils.py
Normal file
@@ -0,0 +1,553 @@
# Copyright (c) 2013, 2014, 2015, 2016 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import sys
from xml.dom.minidom import parseString
from os import path
import re
import copy
import io
import csv
import logging

if sys.version_info >= (3, 3):  # pragma: no cover
    from ipaddress import (ip_address,
                           ip_network,
                           IPv4Address,
                           IPv4Network,
                           IPv6Address,
                           summarize_address_range,
                           collapse_addresses)
else:  # pragma: no cover
    from ipaddr import (IPAddress as ip_address,
                        IPNetwork as ip_network,
                        IPv4Address,
                        IPv4Network,
                        IPv6Address,
                        summarize_address_range,
                        collapse_address_list as collapse_addresses)

try:  # pragma: no cover
    from itertools import filterfalse
except ImportError:  # pragma: no cover
    from itertools import ifilterfalse as filterfalse

log = logging.getLogger(__name__)

IETF_RFC_REFERENCES = {
    # IPv4
    'RFC 1122, Section 3.2.1.3':
    'http://tools.ietf.org/html/rfc1122#section-3.2.1.3',
    'RFC 1918': 'http://tools.ietf.org/html/rfc1918',
    'RFC 3927': 'http://tools.ietf.org/html/rfc3927',
    'RFC 5736': 'http://tools.ietf.org/html/rfc5736',
    'RFC 5737': 'http://tools.ietf.org/html/rfc5737',
    'RFC 3068': 'http://tools.ietf.org/html/rfc3068',
    'RFC 2544': 'http://tools.ietf.org/html/rfc2544',
    'RFC 3171': 'http://tools.ietf.org/html/rfc3171',
    'RFC 919, Section 7': 'http://tools.ietf.org/html/rfc919#section-7',
    # IPv6
    'RFC 4291, Section 2.7': 'http://tools.ietf.org/html/rfc4291#section-2.7',
    'RFC 4291': 'http://tools.ietf.org/html/rfc4291',
    'RFC 4291, Section 2.5.2':
    'http://tools.ietf.org/html/rfc4291#section-2.5.2',
    'RFC 4291, Section 2.5.3':
    'http://tools.ietf.org/html/rfc4291#section-2.5.3',
    'RFC 4291, Section 2.5.6':
    'http://tools.ietf.org/html/rfc4291#section-2.5.6',
    'RFC 4291, Section 2.5.7':
    'http://tools.ietf.org/html/rfc4291#section-2.5.7',
    'RFC 4193': 'https://tools.ietf.org/html/rfc4193'
}

IP_REGEX = (
    r'(?P<ip>'
    # IPv4
    r'(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.)){3}'
    r'(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
    # IPv6
    r'|\[?(((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:)'
    r'{6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
    r'2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]'
    r'{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d'
    r'\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|'
    r'((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
    r'2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]'
    r'{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)'
    r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(('
    r'(:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1'
    r'\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(('
    r'[0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4})'
    r'{0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]'
    r'?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:(('
    r'25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})'
    r')|:)))(%.+)?))\]?'
    # Optional IPv4 Port
    r'((:(6553[0-5]|655[0-2]\d|65[0-4]\d{2}|6[0-4]\d{3}|[1-5]\d{4}|[1-9]\d{0,3}'
    # Optional CIDR block
    r'))|(\/(?:[012]\d?|3[012]?|[4-9])))?'
    r')'
)
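IP_REGEX captures an address with optional port or CIDR suffix in the named group 'ip'; illustrative only:

import re
text = 'connections from 192.0.2.1:8080 and [2001:db8::1]:443'
for m in re.finditer(IP_REGEX, text):
    print(m.group('ip'))
# -> '192.0.2.1:8080', then '[2001:db8::1]:443'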
def ipv4_lstrip_zeros(address):
    """
    The function to strip leading zeros in each octet of an IPv4 address.

    Args:
        address: An IPv4 address in string format.

    Returns:
        String: The modified IPv4 address string.
    """

    # Split the octets.
    obj = address.strip().split('.')

    for x, y in enumerate(obj):

        # Strip leading zeros. Split / here in case CIDR is attached.
        obj[x] = y.split('/')[0].lstrip('0')
        if obj[x] in ['', None]:
            obj[x] = '0'

    return '.'.join(obj)
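Expected behavior, shown as doctest-style comments:

ipv4_lstrip_zeros('074.125.000.001')    # -> '74.125.0.1'
ipv4_lstrip_zeros('010.000.000.000/8')  # -> '10.0.0.0' (CIDR suffix dropped)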
def calculate_cidr(start_address, end_address):
    """
    The function to calculate a CIDR range(s) from a start and end IP
    address.

    Args:
        start_address: The starting IP address in string format.
        end_address: The ending IP address in string format.

    Returns:
        List: A list of calculated CIDR ranges.
    """

    tmp_addrs = []

    try:
        tmp_addrs.extend(summarize_address_range(
            ip_address(start_address),
            ip_address(end_address)))

    except (KeyError, ValueError, TypeError):  # pragma: no cover

        try:
            tmp_addrs.extend(summarize_address_range(
                ip_network(start_address).network_address,
                ip_network(end_address).network_address))
        except AttributeError:  # pragma: no cover
            tmp_addrs.extend(summarize_address_range(
                ip_network(start_address).ip,
                ip_network(end_address).ip))

    return [i.__str__() for i in collapse_addresses(tmp_addrs)]
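Expected behavior for simple ranges:

calculate_cidr('192.0.2.16', '192.0.2.31')  # -> ['192.0.2.16/28']
calculate_cidr('192.0.2.0', '192.0.3.255')  # -> ['192.0.2.0/23']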
def get_countries(is_legacy_xml=False):
    """
    The function to generate a dictionary containing ISO_3166-1 country codes
    to names.

    Args:
        is_legacy_xml: Boolean for whether to use the older country code
            list (iso_3166-1_list_en.xml).

    Returns:
        Dictionary: A dictionary with the country codes as the keys and the
            country names as the values.
    """

    # Initialize the countries dictionary.
    countries = {}

    # Set the data directory based on if the script is a frozen executable.
    if sys.platform == 'win32' and getattr(sys, 'frozen', False):
        data_dir = path.dirname(sys.executable)  # pragma: no cover
    else:
        data_dir = path.dirname(__file__)

    if is_legacy_xml:

        log.debug('Opening country code legacy XML: {0}'.format(
            str(data_dir) + '/data/iso_3166-1_list_en.xml'))

        # Create the country codes file object.
        f = io.open(str(data_dir) + '/data/iso_3166-1_list_en.xml', 'r',
                    encoding='ISO-8859-1')

        # Read the file.
        data = f.read()

        # Check if there is data.
        if not data:  # pragma: no cover
            return {}

        # Parse the data to get the DOM.
        dom = parseString(data)

        # Retrieve the country entries.
        entries = dom.getElementsByTagName('ISO_3166-1_Entry')

        # Iterate through the entries and add to the countries dictionary.
        for entry in entries:

            # Retrieve the country code and name from the DOM.
            code = entry.getElementsByTagName(
                'ISO_3166-1_Alpha-2_Code_element')[0].firstChild.data
            name = entry.getElementsByTagName(
                'ISO_3166-1_Country_name')[0].firstChild.data

            # Add to the countries dictionary.
            countries[code] = name.title()

    else:

        log.debug('Opening country code CSV: {0}'.format(
            str(data_dir) + '/data/iso_3166-1.csv'))

        # Create the country codes file object.
        f = io.open(str(data_dir) + '/data/iso_3166-1.csv', 'r',
                    encoding='utf-8')

        # Create csv reader object.
        csv_reader = csv.reader(f, delimiter=',', quotechar='"')

        # Iterate through the rows and add to the countries dictionary.
        for row in csv_reader:

            # Retrieve the country code and name columns.
            code = row[0]
            name = row[1]

            # Add to the countries dictionary.
            countries[code] = name

    return countries
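Illustrative usage (the exact name string assumes the value recorded in the bundled CSV):

countries = get_countries()
countries['US']  # -> 'United States' (per data/iso_3166-1.csv)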
def ipv4_is_defined(address):
    """
    The function for checking if an IPv4 address is defined (does not need
    to be resolved).

    Args:
        address: An IPv4 address in string format.

    Returns:
        Tuple:

        :Boolean: True if given address is defined, otherwise False
        :String: IETF assignment name if given address is defined,
            otherwise ''
        :String: IETF assignment RFC if given address is defined,
            otherwise ''
    """

    # Initialize the IP address object.
    query_ip = IPv4Address(str(address))

    # This Network
    if query_ip in IPv4Network('0.0.0.0/8'):
        return True, 'This Network', 'RFC 1122, Section 3.2.1.3'

    # Loopback
    elif query_ip.is_loopback:
        return True, 'Loopback', 'RFC 1122, Section 3.2.1.3'

    # Link Local
    elif query_ip.is_link_local:
        return True, 'Link Local', 'RFC 3927'

    # IETF Protocol Assignments
    elif query_ip in IPv4Network('192.0.0.0/24'):
        return True, 'IETF Protocol Assignments', 'RFC 5736'

    # TEST-NET-1
    elif query_ip in IPv4Network('192.0.2.0/24'):
        return True, 'TEST-NET-1', 'RFC 5737'

    # 6to4 Relay Anycast
    elif query_ip in IPv4Network('192.88.99.0/24'):
        return True, '6to4 Relay Anycast', 'RFC 3068'

    # Network Interconnect Device Benchmark Testing
    elif query_ip in IPv4Network('198.18.0.0/15'):
        return (True,
                'Network Interconnect Device Benchmark Testing',
                'RFC 2544')

    # TEST-NET-2
    elif query_ip in IPv4Network('198.51.100.0/24'):
        return True, 'TEST-NET-2', 'RFC 5737'

    # TEST-NET-3
    elif query_ip in IPv4Network('203.0.113.0/24'):
        return True, 'TEST-NET-3', 'RFC 5737'

    # Multicast
    elif query_ip.is_multicast:
        return True, 'Multicast', 'RFC 3171'

    # Limited Broadcast
    elif query_ip in IPv4Network('255.255.255.255/32'):
        return True, 'Limited Broadcast', 'RFC 919, Section 7'

    # Private-Use Networks
    elif query_ip.is_private:
        return True, 'Private-Use Networks', 'RFC 1918'

    return False, '', ''
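Expected behavior:

ipv4_is_defined('192.0.2.1')       # -> (True, 'TEST-NET-1', 'RFC 5737')
ipv4_is_defined('74.125.225.229')  # -> (False, '', '')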
def ipv6_is_defined(address):
    """
    The function for checking if an IPv6 address is defined (does not need
    to be resolved).

    Args:
        address: An IPv6 address in string format.

    Returns:
        Tuple:

        :Boolean: True if address is defined, otherwise False
        :String: IETF assignment name if address is defined, otherwise ''
        :String: IETF assignment RFC if address is defined, otherwise ''
    """

    # Initialize the IP address object.
    query_ip = IPv6Address(str(address))

    # Multicast
    if query_ip.is_multicast:
        return True, 'Multicast', 'RFC 4291, Section 2.7'

    # Unspecified
    elif query_ip.is_unspecified:
        return True, 'Unspecified', 'RFC 4291, Section 2.5.2'

    # Loopback.
    elif query_ip.is_loopback:
        return True, 'Loopback', 'RFC 4291, Section 2.5.3'

    # Reserved
    elif query_ip.is_reserved:
        return True, 'Reserved', 'RFC 4291'

    # Link-Local
    elif query_ip.is_link_local:
        return True, 'Link-Local', 'RFC 4291, Section 2.5.6'

    # Site-Local
    elif query_ip.is_site_local:
        return True, 'Site-Local', 'RFC 4291, Section 2.5.7'

    # Unique Local Unicast
    elif query_ip.is_private:
        return True, 'Unique Local Unicast', 'RFC 4193'

    return False, '', ''
def unique_everseen(iterable, key=None):
    """
    The generator to list unique elements, preserving the order. Remembers
    all elements ever seen. This was taken from the itertools recipes.

    Args:
        iterable: An iterable to process.
        key: Optional function to run when checking elements (e.g.,
            str.lower)

    Returns:
        Generator: Yields a generator object.
    """

    seen = set()
    seen_add = seen.add

    if key is None:

        for element in filterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element

    else:

        for element in iterable:

            k = key(element)

            if k not in seen:
                seen_add(k)
                yield element
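The itertools-recipe behavior, for reference:

list(unique_everseen('AAAABBBCCDAABBB'))         # -> ['A', 'B', 'C', 'D']
list(unique_everseen('ABBCcAD', key=str.lower))  # -> ['A', 'B', 'C', 'D']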
def unique_addresses(data=None, file_path=None):
    """
    The function to search an input string and/or file, extracting and
    counting IPv4/IPv6 addresses/networks. Summarizes ports with sub-counts.
    If both a string and file_path are provided, it will process them both.

    Args:
        data: A string to process.
        file_path: An optional file path to process.

    Returns:
        Dictionary:

        :ip address/network: Each address or network found is a dictionary
            with\:

            :count: Total number of times seen (Integer)
            :ports: Dictionary with port numbers as keys and the number of
                times seen for this ip as values (Dictionary)

    Raises:
        ValueError: Arguments provided are invalid.
    """

    if not data and not file_path:
        raise ValueError('No data or file path provided.')

    ret = {}
    base = {
        'count': 0,
        'ports': {}
    }

    file_data = None
    if file_path:

        log.debug('Opening file for unique address analysis: {0}'.format(
            str(file_path)))

        f = open(str(file_path), 'r')

        # Read the file.
        file_data = f.read()

    pattern = re.compile(
        str(IP_REGEX),
        re.DOTALL
    )

    # Check if there is data.
    log.debug('Analyzing input/file data')
    for input_data in [data, file_data]:

        if input_data:

            # Search for IPs.
            for match in pattern.finditer(input_data):

                is_net = False
                port = None
                try:

                    found = match.group('ip')

                    if '.' in found and ':' in found:

                        split = found.split(':')
                        ip_or_net = split[0]
                        port = split[1]

                    elif '[' in found:

                        split = found.split(']:')
                        ip_or_net = split[0][1:]
                        port = split[1]

                    elif '/' in found:

                        is_net = True
                        ip_or_net = found

                    else:

                        ip_or_net = found

                    if is_net:
                        ip_obj = ip_network(ip_or_net)
                    else:
                        ip_obj = ip_address(ip_or_net)

                    obj_str = ip_obj.__str__()

                    if obj_str not in ret.keys():
                        ret[obj_str] = copy.deepcopy(base)

                    ret[obj_str]['count'] += 1

                    if port:
                        try:
                            ret[obj_str]['ports'][str(port)] += 1
                        except KeyError:
                            ret[obj_str]['ports'][str(port)] = 1

                except (KeyError, ValueError):

                    continue

    return ret
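An illustrative run over a log fragment:

line = '192.0.2.1:80 192.0.2.1:80 192.0.2.1:443'
unique_addresses(data=line)
# -> {'192.0.2.1': {'count': 3, 'ports': {'80': 2, '443': 1}}}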
683
lib/ipwhois/whois.py
Normal file
@@ -0,0 +1,683 @@
# Copyright (c) 2013, 2014, 2015, 2016 Philip Hane
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import sys
import re
import copy
from datetime import datetime
import logging
from .utils import unique_everseen
from . import (BlacklistError, WhoisLookupError, NetError)

if sys.version_info >= (3, 3):  # pragma: no cover
    from ipaddress import (ip_address,
                           ip_network,
                           summarize_address_range,
                           collapse_addresses)
else:  # pragma: no cover
    from ipaddr import (IPAddress as ip_address,
                        IPNetwork as ip_network,
                        summarize_address_range,
                        collapse_address_list as collapse_addresses)

log = logging.getLogger(__name__)

# Legacy base whois output dictionary.
BASE_NET = {
    'cidr': None,
    'name': None,
    'handle': None,
    'range': None,
    'description': None,
    'country': None,
    'state': None,
    'city': None,
    'address': None,
    'postal_code': None,
    'emails': None,
    'created': None,
    'updated': None
}

RIR_WHOIS = {
    'arin': {
        'server': 'whois.arin.net',
        'fields': {
            'name': r'(NetName):[^\S\n]+(?P<val>.+?)\n',
            'handle': r'(NetHandle):[^\S\n]+(?P<val>.+?)\n',
            'description': r'(OrgName|CustName):[^\S\n]+(?P<val>.+?)'
                           r'(?=(\n\S):?)',
            'country': r'(Country):[^\S\n]+(?P<val>.+?)\n',
            'state': r'(StateProv):[^\S\n]+(?P<val>.+?)\n',
            'city': r'(City):[^\S\n]+(?P<val>.+?)\n',
            'address': r'(Address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
            'postal_code': r'(PostalCode):[^\S\n]+(?P<val>.+?)\n',
            'emails': (
                r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
                r'[^\S\n]+.*?)*?\n'
            ),
            'created': r'(RegDate):[^\S\n]+(?P<val>.+?)\n',
            'updated': r'(Updated):[^\S\n]+(?P<val>.+?)\n',
        },
        'dt_format': '%Y-%m-%d'
    },
    'ripencc': {
        'server': 'whois.ripe.net',
        'fields': {
            'name': r'(netname):[^\S\n]+(?P<val>.+?)\n',
            'handle': r'(nic-hdl):[^\S\n]+(?P<val>.+?)\n',
            'description': r'(descr):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
            'country': r'(country):[^\S\n]+(?P<val>.+?)\n',
            'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
            'emails': (
                r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
                r'[^\S\n]+.*?)*?\n'
            ),
            'created': (
                r'(created):[^\S\n]+(?P<val>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]'
                r'{2}:[0-9]{2}:[0-9]{2}Z).*?\n'
            ),
            'updated': (
                r'(last-modified):[^\S\n]+(?P<val>[0-9]{4}-[0-9]{2}-[0-9]{2}T'
                r'[0-9]{2}:[0-9]{2}:[0-9]{2}Z).*?\n'
            )
        },
        'dt_format': '%Y-%m-%dT%H:%M:%SZ'
    },
    'apnic': {
        'server': 'whois.apnic.net',
        'fields': {
            'name': r'(netname):[^\S\n]+(?P<val>.+?)\n',
            'handle': r'(nic-hdl):[^\S\n]+(?P<val>.+?)\n',
            'description': r'(descr):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
            'country': r'(country):[^\S\n]+(?P<val>.+?)\n',
            'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
            'emails': (
                r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
                r'[^\S\n]+.*?)*?\n'
            ),
            'updated': r'(changed):[^\S\n]+.*(?P<val>[0-9]{8}).*?\n'
        },
        'dt_format': '%Y%m%d'
    },
    'lacnic': {
        'server': 'whois.lacnic.net',
        'fields': {
            'handle': r'(nic-hdl):[^\S\n]+(?P<val>.+?)\n',
            'description': r'(owner):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
            'country': r'(country):[^\S\n]+(?P<val>.+?)\n',
            'emails': (
                r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
                r'[^\S\n]+.*?)*?\n'
            ),
            'created': r'(created):[^\S\n]+(?P<val>[0-9]{8}).*?\n',
            'updated': r'(changed):[^\S\n]+(?P<val>[0-9]{8}).*?\n'
        },
        'dt_format': '%Y%m%d'
    },
    'afrinic': {
        'server': 'whois.afrinic.net',
        'fields': {
            'name': r'(netname):[^\S\n]+(?P<val>.+?)\n',
            'handle': r'(nic-hdl):[^\S\n]+(?P<val>.+?)\n',
            'description': r'(descr):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
            'country': r'(country):[^\S\n]+(?P<val>.+?)\n',
            'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
            'emails': (
                r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
                r'[^\S\n]+.*?)*?\n'
            ),
        }
    }
}

RWHOIS = {
    'fields': {
        'cidr': r'(network:IP-Network):(?P<val>.+?)\n',
        'name': r'(network:ID):(?P<val>.+?)\n',
        'description': (
            r'(network:(Org-Name|Organization(;I)?)):(?P<val>.+?)\n'
        ),
        'country': r'(network:(Country|Country-Code)):(?P<val>.+?)\n',
        'state': r'(network:State):(?P<val>.+?)\n',
        'city': r'(network:City):(?P<val>.+?)\n',
        'address': r'(network:Street-Address):(?P<val>.+?)\n',
        'postal_code': r'(network:Postal-Code):(?P<val>.+?)\n',
        'emails': (
            r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
            r'[^\S\n]+.*?)*?\n'
        ),
        'created': r'(network:Created):(?P<val>.+?)\n',
        'updated': r'(network:Updated):(?P<val>.+?)\n'
    }
}

ASN_REFERRALS = {
    'whois://whois.ripe.net': 'ripencc',
    'whois://whois.apnic.net': 'apnic',
    'whois://whois.lacnic.net': 'lacnic',
    'whois://whois.afrinic.net': 'afrinic',
}
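The field patterns are ordinary regexes with a 'val' named group; illustrative only:

import re
sample = 'NetName:        GOOGLE\n'
m = re.search(RIR_WHOIS['arin']['fields']['name'], sample)
# m.group('val') -> 'GOOGLE'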
class Whois:
    """
    The class for parsing via whois.

    Args:
        net: A ipwhois.net.Net object.

    Raises:
        NetError: The parameter provided is not an instance of
            ipwhois.net.Net
        IPDefinedError: The address provided is defined (does not need to be
            resolved).
    """

    def __init__(self, net):

        from .net import Net

        # ipwhois.net.Net validation
        if isinstance(net, Net):
            self._net = net
        else:
            raise NetError('The provided net parameter is not an instance of '
                           'ipwhois.net.Net')

    def _parse_fields(self, response, fields_dict, net_start=None,
                      net_end=None, dt_format=None, field_list=None):
        """
        The function for parsing whois fields from a data input.

        Args:
            response: The response from the whois/rwhois server.
            fields_dict: The dictionary of fields -> regex search values.
            net_start: The starting point of the network (if parsing
                multiple networks).
            net_end: The ending point of the network (if parsing multiple
                networks).
            dt_format: The format of datetime fields if known.
            field_list: If provided, a list of fields to parse:
                ['name', 'handle', 'description', 'country', 'state',
                'city', 'address', 'postal_code', 'emails', 'created',
                'updated']

        Returns:
            Dictionary: A dictionary of fields provided in fields_dict.
        """

        ret = {}

        if not field_list:
            field_list = ['name', 'handle', 'description', 'country',
                          'state', 'city', 'address', 'postal_code',
                          'emails', 'created', 'updated']

        generate = ((field, pattern) for (field, pattern) in
                    fields_dict.items() if field in field_list)

        for field, pattern in generate:

            pattern = re.compile(
                str(pattern),
                re.DOTALL
            )

            if net_start is not None:
                match = pattern.finditer(response, net_end, net_start)
            elif net_end is not None:
                match = pattern.finditer(response, net_end)
            else:
                match = pattern.finditer(response)

            values = []
            sub_section_end = None
            for m in match:

                if sub_section_end:

                    # Stop at the first gap between matches, except for the
                    # emails field which can appear throughout a section.
                    if field not in ('emails',) and (
                            sub_section_end != (m.start() - 1)):
                        break

                try:
                    values.append(m.group('val').strip())
                except IndexError:
                    pass

                sub_section_end = m.end()

            if len(values) > 0:

                value = None
                try:

                    if field == 'country':
                        value = values[0].upper()
                    elif field in ['created', 'updated'] and dt_format:
                        value = datetime.strptime(
                            values[0],
                            str(dt_format)).isoformat('T')
                    else:
                        values = unique_everseen(values)
                        value = '\n'.join(values)

                except ValueError as e:
                    log.debug('Whois field parsing failed for {0}: '
                              '{1}'.format(field, e))
                    pass

                ret[field] = value

        return ret
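A hedged sketch of _parse_fields (a private method; the fake response and net instance below are assumptions):

w = Whois(net)  # net: a hypothetical ipwhois.net.Net instance
fake = 'NetName:        EXAMPLE-NET\nCountry:        US\n'
w._parse_fields(fake, RIR_WHOIS['arin']['fields'],
                field_list=['name', 'country'])
# -> {'name': 'EXAMPLE-NET', 'country': 'US'}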
def _get_nets_arin(self, response):
|
||||
"""
|
||||
The function for parsing network blocks from ARIN whois data.
|
||||
|
||||
Args:
|
||||
response: The response from the ARIN whois server.
|
||||
|
||||
Returns:
|
||||
List: A of dictionaries containing keys: cidr, start, end.
|
||||
"""
|
||||
|
||||
        nets = []

        # Find the first NetRange value.
        pattern = re.compile(
            r'^NetRange:[^\S\n]+(.+)$',
            re.MULTILINE
        )
        temp = pattern.search(response)
        net_range = None
        net_range_start = None
        if temp is not None:
            net_range = temp.group(1).strip()
            net_range_start = temp.start()

        # Iterate through all of the networks found, storing the CIDR value
        # and the start and end positions.
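        # For the first CIDR line the NetRange found above (preceding it) is
        # used; for subsequent CIDR lines the NetRange is re-searched forward
        # from the current match position.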
        for match in re.finditer(
            r'^CIDR:[^\S\n]+(.+?,[^\S\n].+|.+)$',
            response,
            re.MULTILINE
        ):

            try:

                net = copy.deepcopy(BASE_NET)

                if len(nets) > 0:
                    temp = pattern.search(response, match.start())
                    net_range = None
                    net_range_start = None
                    if temp is not None:
                        net_range = temp.group(1).strip()
                        net_range_start = temp.start()

                if net_range is not None:
                    if net_range_start < match.start() or len(nets) > 0:
                        net['range'] = net_range

                net['cidr'] = ', '.join(
                    [ip_network(c.strip()).__str__()
                     for c in match.group(1).split(', ')]
                )
                net['start'] = match.start()
                net['end'] = match.end()
                nets.append(net)

            except ValueError:

                pass

        return nets

    def _get_nets_lacnic(self, response):
        """
        The function for parsing network blocks from LACNIC whois data.

        Args:
            response: The response from the LACNIC whois server.

        Returns:
            List: A list of dictionaries containing keys: cidr, start, end.
        """

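        # A minimal sketch of the LACNIC lines matched below (values are
        # illustrative). LACNIC may abbreviate IPv4 networks, e.g.:
        #
        #     inetnum:    198.51/16
        #
        # which the dot-count padding below expands to 198.51.0.0/16 before
        # passing it to ip_network().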
        nets = []

        # Iterate through all of the networks found, storing the CIDR value
        # and the start and end positions.
        for match in re.finditer(
            r'^(inetnum|inet6num|route):[^\S\n]+(.+?,[^\S\n].+|.+)$',
            response,
            re.MULTILINE
        ):

            try:

                net = copy.deepcopy(BASE_NET)
                net['range'] = match.group(2).strip()

                temp = []
                for addr in match.group(2).strip().split(', '):

                    count = addr.count('.')
                    if count != 0 and count < 4:

                        addr_split = addr.strip().split('/')
                        for i in range(count + 1, 4):
                            addr_split[0] += '.0'

                        addr = '/'.join(addr_split)

                    temp.append(ip_network(addr.strip()).__str__())

                net['cidr'] = ', '.join(temp)
                net['start'] = match.start()
                net['end'] = match.end()
                nets.append(net)

            except ValueError:

                pass

        return nets

    def _get_nets_other(self, response):
        """
        The function for parsing network blocks from generic whois data.

        Args:
            response: The response from the whois/rwhois server.

        Returns:
            List: A list of dictionaries containing keys: cidr, start, end.
        """

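        # A minimal sketch of the RIPE/APNIC/AFRINIC-style lines matched
        # below (values are illustrative):
        #
        #     inetnum:        198.51.100.0 - 198.51.100.255
        #
        # Explicit ranges are summarized and collapsed into CIDR notation; a
        # single-token value is treated as already being in CIDR form.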
        nets = []

        # Iterate through all of the networks found, storing the CIDR value
        # and the start and end positions.
        for match in re.finditer(
            r'^(inetnum|inet6num|route):[^\S\n]+((.+?)[^\S\n]-[^\S\n](.+)|'
            r'.+)$',
            response,
            re.MULTILINE
        ):

            try:

                net = copy.deepcopy(BASE_NET)
                net['range'] = match.group(2)

                if match.group(3) and match.group(4):

                    addrs = []
                    addrs.extend(summarize_address_range(
                        ip_address(match.group(3).strip()),
                        ip_address(match.group(4).strip())))

                    cidr = ', '.join(
                        [i.__str__() for i in collapse_addresses(addrs)]
                    )

                else:

                    cidr = ip_network(match.group(2).strip()).__str__()

                net['cidr'] = cidr
                net['start'] = match.start()
                net['end'] = match.end()
                nets.append(net)

            except (ValueError, TypeError):

                pass

        return nets

    def lookup(self, inc_raw=False, retry_count=3, response=None,
               get_referral=False, extra_blacklist=None,
               ignore_referral_errors=False, asn_data=None,
               field_list=None, is_offline=False):
        """
        The function for retrieving and parsing whois information for an IP
        address via port 43/tcp (WHOIS).

        Args:
            inc_raw: Boolean for whether to include the raw results in the
                returned dictionary.
            retry_count: The number of times to retry in case socket errors,
                timeouts, connection resets, etc. are encountered.
            response: Optional response object, this bypasses the Whois
                lookup.
            get_referral: Boolean for whether to retrieve referral whois
                information, if available.
            extra_blacklist: A list of blacklisted whois servers in addition
                to the global BLACKLIST.
            ignore_referral_errors: Boolean for whether to ignore and continue
                when an exception is encountered on referral whois lookups.
            asn_data: Optional ASN result object, this bypasses the ASN
                lookup.
            field_list: If provided, a list of fields to parse:
                ['name', 'handle', 'description', 'country', 'state', 'city',
                'address', 'postal_code', 'emails', 'created', 'updated']
            is_offline: Boolean for whether to perform lookups offline. If
                True, response and asn_data must be provided. Primarily used
                for testing.

        Returns:
            Dictionary:

            :query: The IP address (String)
            :asn: The Autonomous System Number (String)
            :asn_date: The ASN Allocation date (String)
            :asn_registry: The assigned ASN registry (String)
            :asn_cidr: The assigned ASN CIDR (String)
            :asn_country_code: The assigned ASN country code (String)
            :nets: Dictionaries containing network information which consists
                of the fields listed in the RIR_WHOIS dictionary. (List)
            :raw: Raw whois results if the inc_raw parameter is True. (String)
            :referral: Dictionary of referral whois information if
                get_referral is True and the server isn't blacklisted.
                Consists of fields listed in the RWHOIS dictionary.
            :raw_referral: Raw referral whois results if the inc_raw parameter
                is True. (String)
        """

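        # A minimal usage sketch (assumes network access; the address and
        # ASN values are illustrative):
        #
        #     from ipwhois.net import Net
        #     net = Net('74.125.225.229')
        #     obj = Whois(net)
        #     results = obj.lookup(
        #         asn_data={'asn_registry': 'arin', 'asn': '15169',
        #                   'asn_cidr': '74.125.225.0/24',
        #                   'asn_country_code': 'US', 'asn_date': ''},
        #         retry_count=3
        #     )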
        # Create the return dictionary.
        results = {
            'query': self._net.address_str,
            'nets': [],
            'raw': None,
            'referral': None,
            'raw_referral': None
        }

        # The referral server and port. Only used if get_referral is True.
        referral_server = None
        referral_port = 0

        # Only fetch the response if we haven't already.
        if response is None or (not is_offline and
                                asn_data['asn_registry'] != 'arin'):

            log.debug('Response not given, perform WHOIS lookup for {0}'
                      .format(self._net.address_str))

            # Retrieve the whois data.
            response = self._net.get_whois(
                asn_registry=asn_data['asn_registry'], retry_count=retry_count,
                extra_blacklist=extra_blacklist
            )

            if get_referral:

                # Search for a referral server.
                for match in re.finditer(
                    r'^ReferralServer:[^\S\n]+(.+:[0-9]+)$',
                    response,
                    re.MULTILINE
                ):

                    try:

                        temp = match.group(1)
                        if 'rwhois://' not in temp:  # pragma: no cover
                            raise ValueError

                        temp = temp.replace('rwhois://', '').split(':')

                        if int(temp[1]) > 65535:  # pragma: no cover
                            raise ValueError

                        referral_server = temp[0]
                        referral_port = int(temp[1])

                    except (ValueError, KeyError):  # pragma: no cover

                        continue

                    break

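        # A ReferralServer line, when present, looks roughly like the
        # following (illustrative host/port):
        #
        #     ReferralServer: rwhois://rwhois.example.net:4321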
        # Retrieve the referral whois data.
        if get_referral and referral_server:

            log.debug('Perform referral WHOIS lookup')

            response_ref = None

            try:

                response_ref = self._net.get_whois(
                    asn_registry='', retry_count=retry_count,
                    server=referral_server, port=referral_port,
                    extra_blacklist=extra_blacklist
                )

            except (BlacklistError, WhoisLookupError):

                if not ignore_referral_errors:

                    raise

            if response_ref:

                log.debug('Parsing referral WHOIS data')

                if inc_raw:

                    results['raw_referral'] = response_ref

                temp_rnet = self._parse_fields(
                    response_ref,
                    RWHOIS['fields'],
                    field_list=field_list
                )

                # Add the referral fields to the return dictionary.
                results['referral'] = temp_rnet

        # If inc_raw parameter is True, add the response to return dictionary.
        if inc_raw:

            results['raw'] = response

        nets = []

        if asn_data['asn_registry'] == 'arin':

            nets_response = self._get_nets_arin(response)

        elif asn_data['asn_registry'] == 'lacnic':

            nets_response = self._get_nets_lacnic(response)

        else:

            nets_response = self._get_nets_other(response)

        nets.extend(nets_response)

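        # Each net dict carries the start/end offsets recorded by the
        # _get_nets_* parser above; _parse_fields uses them (plus the start
        # of the following section, if any) to confine its regex searches to
        # this network's block.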
        # Iterate through all of the network sections and parse out the
        # appropriate fields for each.
        log.debug('Parsing WHOIS data')
        for index, net in enumerate(nets):

            section_end = None
            if index + 1 < len(nets):

                section_end = nets[index + 1]['start']

            try:

                dt_format = RIR_WHOIS[asn_data['asn_registry']]['dt_format']

            except KeyError:

                dt_format = None

            temp_net = self._parse_fields(
                response,
                RIR_WHOIS[asn_data['asn_registry']]['fields'],
                section_end,
                net['end'],
                dt_format,
                field_list
            )

            # Merge the net dictionaries.
            net.update(temp_net)

            # The start and end values are no longer needed.
            del net['start'], net['end']

        # Add the networks to the return dictionary.
        results['nets'] = nets

        return results