Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-06 05:01:14 -07:00)

Update ipwhois-1.2.0

parent 4d62245cf5
commit 2c4cc34b2b

13 changed files with 289 additions and 368 deletions
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -26,4 +26,4 @@ from .exceptions import *
 from .net import Net
 from .ipwhois import IPWhois

-__version__ = '1.1.0'
+__version__ = '1.2.0'
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -61,21 +61,21 @@ ASN_ORIGIN_WHOIS = {

 ASN_ORIGIN_HTTP = {
     'radb': {
-        'url': 'http://www.radb.net/query/',
+        'url': 'http://www.radb.net/query',
         'form_data_asn_field': 'keywords',
         'form_data': {
             'advanced_query': '1',
             'query': 'Query',
-            '-T option': 'inet-rtr',
+            # '-T option': 'inet-rtr',
             'ip_option': '',
             '-i': '1',
             '-i option': 'origin'
         },
         'fields': {
-            'description': r'(descr):[^\S\n]+(?P<val>.+?)\<br\>',
-            'maintainer': r'(mnt-by):[^\S\n]+(?P<val>.+?)\<br\>',
-            'updated': r'(changed):[^\S\n]+(?P<val>.+?)\<br\>',
-            'source': r'(source):[^\S\n]+(?P<val>.+?)\<br\>',
+            'description': r'(descr):[^\S\n]+(?P<val>.+?)\n',
+            'maintainer': r'(mnt-by):[^\S\n]+(?P<val>.+?)\n',
+            'updated': r'(changed):[^\S\n]+(?P<val>.+?)\n',
+            'source': r'(source):[^\S\n]+(?P<val>.+?)\<',
        }
    },
 }
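Note: an illustrative sketch of what the updated field patterns above match. The RADB HTTP response is now treated as plain text, so values are terminated by a newline rather than a <br> tag; the sample record below is made up, not real RADB output.

import re

# Pattern copied from the updated ASN_ORIGIN_HTTP 'description' field above.
DESCR_RE = r'(descr):[^\S\n]+(?P<val>.+?)\n'

sample = 'route:      203.0.113.0/24\ndescr:      Example Network\norigin:     AS64500\n'
match = re.search(DESCR_RE, sample)
print(match.group('val') if match else None)  # -> Example Network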
@@ -169,16 +169,6 @@ class IPASN:

         return ret

-    def _parse_fields_dns(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_dns() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_dns().')
-        return self.parse_fields_dns(*args, **kwargs)
-
     def parse_fields_verbose_dns(self, response):
         """
         The function for parsing ASN fields from a verbose dns response.
@@ -293,16 +283,6 @@ class IPASN:

         return ret

-    def _parse_fields_whois(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_whois() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_whois().')
-        return self.parse_fields_whois(*args, **kwargs)
-
     def parse_fields_http(self, response, extra_org_map=None):
         """
         The function for parsing ASN fields from a http response.
@@ -403,19 +383,8 @@ class IPASN:

         return asn_data

-    def _parse_fields_http(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_http() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_http().')
-        return self.parse_fields_http(*args, **kwargs)
-
-    def lookup(self, inc_raw=False, retry_count=3, asn_alts=None,
-               extra_org_map=None, asn_methods=None,
-               get_asn_description=True):
+    def lookup(self, inc_raw=False, retry_count=3, extra_org_map=None,
+               asn_methods=None, get_asn_description=True):
         """
         The wrapper function for retrieving and parsing ASN information for an
         IP address.
@@ -426,10 +395,6 @@ class IPASN:
             retry_count (:obj:`int`): The number of times to retry in case
                 socket errors, timeouts, connection resets, etc. are
                 encountered. Defaults to 3.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN dns lookup fails. Allow permutations must be enabled.
-                Defaults to all ['whois', 'http']. *WARNING* deprecated in
-                favor of new argument asn_methods. Defaults to None.
             extra_org_map (:obj:`dict`): Mapping org handles to RIRs. This is
                 for limited cases where ARIN REST (ASN fallback HTTP lookup)
                 does not show an RIR as the org handle e.g., DNIC (which is
@@ -466,17 +431,7 @@ class IPASN:

         if asn_methods is None:

-            if asn_alts is None:
-
-                lookups = ['dns', 'whois', 'http']
-
-            else:
-
-                from warnings import warn
-                warn('IPASN.lookup() asn_alts argument has been deprecated '
-                     'and will be removed. You should now use the asn_methods '
-                     'argument.')
-                lookups = ['dns'] + asn_alts
+            lookups = ['dns', 'whois', 'http']

         else:

@@ -492,12 +447,6 @@ class IPASN:
         dns_success = False
         for index, lookup_method in enumerate(lookups):

-            if index > 0 and not asn_methods and not (
-                    self._net.allow_permutations):
-
-                raise ASNRegistryError('ASN registry lookup failed. '
-                                       'Permutations not allowed.')
-
             if lookup_method == 'dns':

                 try:
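Note: a minimal usage sketch of the reworked lookup path. With asn_alts and allow_permutations gone, the fallback order is expressed directly via asn_methods (a live network lookup is required to run this).

from ipwhois.net import Net
from ipwhois.asn import IPASN

net = Net('74.125.225.229')
ipasn = IPASN(net)
result = ipasn.lookup(asn_methods=['dns', 'whois', 'http'])
print(result['asn'], result['asn_registry'])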
@@ -706,16 +655,6 @@ class ASNOrigin:

         return ret

-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('ASNOrigin._parse_fields() has been deprecated and will be '
-             'removed. You should now use ASNOrigin.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_radb(self, response, is_http=False):
         """
         The function for parsing network blocks from ASN origin data.
@@ -743,7 +682,7 @@ class ASNOrigin:
         nets = []

         if is_http:
-            regex = r'route(?:6)?:[^\S\n]+(?P<val>.+?)<br>'
+            regex = r'route(?:6)?:[^\S\n]+(?P<val>.+?)\n'
         else:
             regex = r'^route(?:6)?:[^\S\n]+(?P<val>.+|.+)$'

@@ -769,18 +708,8 @@ class ASNOrigin:

         return nets

-    def _get_nets_radb(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('ASNOrigin._get_nets_radb() has been deprecated and will be '
-             'removed. You should now use ASNOrigin.get_nets_radb().')
-        return self.get_nets_radb(*args, **kwargs)
-
     def lookup(self, asn=None, inc_raw=False, retry_count=3, response=None,
-               field_list=None, asn_alts=None, asn_methods=None):
+               field_list=None, asn_methods=None):
         """
         The function for retrieving and parsing ASN origin whois information
         via port 43/tcp (WHOIS).
@@ -797,9 +726,6 @@ class ASNOrigin:
             field_list (:obj:`list`): If provided, fields to parse:
                 ['description', 'maintainer', 'updated', 'source']
                 If None, defaults to all.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN whois lookup fails. If None, defaults to all ['http'].
-                *WARNING* deprecated in favor of new argument asn_methods.
             asn_methods (:obj:`list`): ASN lookup types to attempt, in order.
                 If None, defaults to all ['whois', 'http'].

@@ -828,17 +754,7 @@ class ASNOrigin:

         if asn_methods is None:

-            if asn_alts is None:
-
-                lookups = ['whois', 'http']
-
-            else:
-
-                from warnings import warn
-                warn('ASNOrigin.lookup() asn_alts argument has been deprecated'
-                     ' and will be removed. You should now use the asn_methods'
-                     ' argument.')
-                lookups = ['whois'] + asn_alts
+            lookups = ['whois', 'http']

         else:

@@ -875,6 +791,8 @@ class ASNOrigin:
                         asn=asn, retry_count=retry_count
                     )

+                    break
+
                 except (WhoisLookupError, WhoisRateLimitError) as e:

                     log.debug('ASN origin WHOIS lookup failed: {0}'
@@ -888,17 +806,22 @@ class ASNOrigin:
                         log.debug('Response not given, perform ASN origin '
                                   'HTTP lookup for: {0}'.format(asn))

-                        tmp = ASN_ORIGIN_HTTP['radb']['form_data']
-                        tmp[str(ASN_ORIGIN_HTTP['radb']['form_data_asn_field']
-                                )] = asn
+                        # tmp = ASN_ORIGIN_HTTP['radb']['form_data']
+                        # tmp[str(
+                        #     ASN_ORIGIN_HTTP['radb']['form_data_asn_field']
+                        # )] = asn
                         response = self._net.get_http_raw(
-                            url=ASN_ORIGIN_HTTP['radb']['url'],
+                            url=('{0}?advanced_query=1&keywords={1}&-T+option'
+                                 '=&ip_option=&-i=1&-i+option=origin'
+                                 ).format(ASN_ORIGIN_HTTP['radb']['url'], asn),
                             retry_count=retry_count,
-                            request_type='POST',
-                            form_data=tmp
+                            request_type='GET',
+                            # form_data=tmp
                         )
                         is_http = True  # pragma: no cover

+                    break
+
                 except HTTPLookupError as e:

                     log.debug('ASN origin HTTP lookup failed: {0}'
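Note: for reference, this is roughly the GET URL that the commented-out POST form data is replaced by in the hunk above. The ASN value is illustrative only.

from ipwhois.asn import ASN_ORIGIN_HTTP

asn = 'AS15169'  # sample ASN
url = ('{0}?advanced_query=1&keywords={1}&-T+option'
       '=&ip_option=&-i=1&-i+option=origin'
       ).format(ASN_ORIGIN_HTTP['radb']['url'], asn)
print(url)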
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -158,11 +158,14 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
         'ip_lookup_total' (int) - The total number of addresses that
             lookups were attempted for, excluding any that failed ASN
             registry checks.
+        'ip_failed_total' (int) - The total number of addresses that
+            lookups failed for. Excludes any that failed initially, but
+            succeeded after further retries.
         'lacnic' (dict) -
         {
             'failed' (list) - The addresses that failed to lookup.
                 Excludes any that failed initially, but succeeded after
-                futher retries.
+                further retries.
             'rate_limited' (list) - The addresses that encountered
                 rate-limiting. Unless an address is also in 'failed',
                 it eventually succeeded.
@@ -196,6 +199,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
         'ip_input_total': len(addresses),
         'ip_unique_total': 0,
         'ip_lookup_total': 0,
+        'ip_failed_total': 0,
         'lacnic': {'failed': [], 'rate_limited': [], 'total': 0},
         'ripencc': {'failed': [], 'rate_limited': [], 'total': 0},
         'apnic': {'failed': [], 'rate_limited': [], 'total': 0},
@@ -253,15 +257,15 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,

         try:

-            results = ipasn.parse_fields_whois(asn_result)
+            asn_parsed = ipasn.parse_fields_whois(asn_result)

         except ASNRegistryError:  # pragma: no cover

             continue

         # Add valid IP ASN result to asn_parsed_results for RDAP lookup
-        asn_parsed_results[ip] = results
-        stats[results['asn_registry']]['total'] += 1
+        asn_parsed_results[ip] = asn_parsed
+        stats[asn_parsed['asn_registry']]['total'] += 1

     # Set the list of IPs that are not allocated/failed ASN lookup
     stats['unallocated_addresses'] = list(k for k in addresses if k not in
@@ -362,7 +366,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,

                 # Perform the RDAP lookup. retry_count is set to 0
                 # here since we handle that in this function
-                results = rdap.lookup(
+                rdap_result = rdap.lookup(
                     inc_raw=inc_raw, retry_count=0, asn_data=asn_data,
                     depth=depth, excluded_entities=excluded_entities
                 )
@@ -373,7 +377,9 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
                 # Lookup was successful, add to result. Set the nir
                 # key to None as this is not supported
                 # (yet - requires more queries)
-                results[ip] = results
+                results[ip] = asn_data
+                results[ip].update(rdap_result)
+
                 results[ip]['nir'] = None

                 # Remove the IP from the lookup queue
@@ -423,6 +429,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,

                 del asn_parsed_results[ip]
                 stats[rir]['failed'].append(ip)
+                stats['ip_failed_total'] += 1

                 if rir == 'lacnic':

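Note: a hedged usage sketch for the new counter (requires network access). bulk_lookup_rdap() returns a results/stats pair, and 'ip_failed_total' now summarizes failures across all of the per-RIR buckets.

from ipwhois.experimental import bulk_lookup_rdap

results, stats = bulk_lookup_rdap(addresses=['74.125.225.229'])
print(stats['ip_lookup_total'], stats['ip_failed_total'])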
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -42,17 +42,12 @@ class IPWhois:
             seconds. Defaults to 5.
         proxy_opener (:obj:`urllib.request.OpenerDirector`): The request for
             proxy support. Defaults to None.
-        allow_permutations (:obj:`bool`): Allow net.Net() to use additional
-            methods if DNS lookups to Cymru fail. *WARNING* deprecated in
-            favor of new argument asn_methods. Defaults to False.
     """

-    def __init__(self, address, timeout=5, proxy_opener=None,
-                 allow_permutations=False):
+    def __init__(self, address, timeout=5, proxy_opener=None):

         self.net = Net(
-            address=address, timeout=timeout, proxy_opener=proxy_opener,
-            allow_permutations=allow_permutations
+            address=address, timeout=timeout, proxy_opener=proxy_opener
         )
         self.ipasn = IPASN(self.net)

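Note: a minimal sketch of the updated constructor. allow_permutations is no longer accepted, and lookup fallback order is passed through asn_methods on the lookup calls instead (network access required to run this).

from ipwhois import IPWhois

obj = IPWhois('74.125.225.229', timeout=5)
whois_result = obj.lookup_whois(asn_methods=['dns', 'whois', 'http'])
print(whois_result['asn'], whois_result['asn_description'])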
@@ -71,7 +66,7 @@ class IPWhois:

     def lookup_whois(self, inc_raw=False, retry_count=3, get_referral=False,
                      extra_blacklist=None, ignore_referral_errors=False,
-                     field_list=None, asn_alts=None, extra_org_map=None,
+                     field_list=None, extra_org_map=None,
                      inc_nir=True, nir_field_list=None, asn_methods=None,
                      get_asn_description=True):
         """
@@ -95,10 +90,6 @@ class IPWhois:
                 ['name', 'handle', 'description', 'country', 'state', 'city',
                 'address', 'postal_code', 'emails', 'created', 'updated']
                 If None, defaults to all.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN dns lookup fails. Allow permutations must be enabled.
-                If None, defaults to all ['whois', 'http']. *WARNING*
-                deprecated in favor of new argument asn_methods.
             extra_org_map (:obj:`dict`): Dictionary mapping org handles to
                 RIRs. This is for limited cases where ARIN REST (ASN fallback
                 HTTP lookup) does not show an RIR as the org handle e.g., DNIC
@@ -161,7 +152,7 @@ class IPWhois:
         log.debug('ASN lookup for {0}'.format(self.address_str))

         asn_data = self.ipasn.lookup(
-            inc_raw=inc_raw, retry_count=retry_count, asn_alts=asn_alts,
+            inc_raw=inc_raw, retry_count=retry_count,
             extra_org_map=extra_org_map, asn_methods=asn_methods,
             get_asn_description=get_asn_description
         )
@@ -206,9 +197,9 @@ class IPWhois:

     def lookup_rdap(self, inc_raw=False, retry_count=3, depth=0,
                     excluded_entities=None, bootstrap=False,
-                    rate_limit_timeout=120, asn_alts=None, extra_org_map=None,
+                    rate_limit_timeout=120, extra_org_map=None,
                     inc_nir=True, nir_field_list=None, asn_methods=None,
-                    get_asn_description=True):
+                    get_asn_description=True, root_ent_check=True):
         """
         The function for retrieving and parsing whois information for an IP
         address via HTTP (RDAP).
|
@ -233,10 +224,6 @@ class IPWhois:
|
||||||
rate_limit_timeout (:obj:`int`): The number of seconds to wait
|
rate_limit_timeout (:obj:`int`): The number of seconds to wait
|
||||||
before retrying when a rate limit notice is returned via
|
before retrying when a rate limit notice is returned via
|
||||||
rdap+json. Defaults to 120.
|
rdap+json. Defaults to 120.
|
||||||
asn_alts (:obj:`list`): Additional lookup types to attempt if the
|
|
||||||
ASN dns lookup fails. Allow permutations must be enabled.
|
|
||||||
If None, defaults to all ['whois', 'http']. *WARNING*
|
|
||||||
deprecated in favor of new argument asn_methods.
|
|
||||||
extra_org_map (:obj:`dict`): Dictionary mapping org handles to
|
extra_org_map (:obj:`dict`): Dictionary mapping org handles to
|
||||||
RIRs. This is for limited cases where ARIN REST (ASN fallback
|
RIRs. This is for limited cases where ARIN REST (ASN fallback
|
||||||
HTTP lookup) does not show an RIR as the org handle e.g., DNIC
|
HTTP lookup) does not show an RIR as the org handle e.g., DNIC
|
||||||
|
@@ -260,6 +247,9 @@ class IPWhois:
             get_asn_description (:obj:`bool`): Whether to run an additional
                 query when pulling ASN information via dns, in order to get
                 the ASN description. Defaults to True.
+            root_ent_check (:obj:`bool`): If True, will perform
+                additional RDAP HTTP queries for missing entity data at the
+                root level. Defaults to True.

         Returns:
             dict: The IP RDAP lookup results
|
@ -303,7 +293,7 @@ class IPWhois:
|
||||||
# Retrieve the ASN information.
|
# Retrieve the ASN information.
|
||||||
log.debug('ASN lookup for {0}'.format(self.address_str))
|
log.debug('ASN lookup for {0}'.format(self.address_str))
|
||||||
asn_data = self.ipasn.lookup(
|
asn_data = self.ipasn.lookup(
|
||||||
inc_raw=inc_raw, retry_count=retry_count, asn_alts=asn_alts,
|
inc_raw=inc_raw, retry_count=retry_count,
|
||||||
extra_org_map=extra_org_map, asn_methods=asn_methods,
|
extra_org_map=extra_org_map, asn_methods=asn_methods,
|
||||||
get_asn_description=get_asn_description
|
get_asn_description=get_asn_description
|
||||||
)
|
)
|
||||||
|
@@ -318,7 +308,8 @@ class IPWhois:
             inc_raw=inc_raw, retry_count=retry_count, asn_data=asn_data,
             depth=depth, excluded_entities=excluded_entities,
             response=response, bootstrap=bootstrap,
-            rate_limit_timeout=rate_limit_timeout
+            rate_limit_timeout=rate_limit_timeout,
+            root_ent_check=root_ent_check
         )

         # Add the RDAP information to the return dictionary.
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -103,17 +103,13 @@ class Net:
             seconds. Defaults to 5.
         proxy_opener (:obj:`urllib.request.OpenerDirector`): The request for
             proxy support. Defaults to None.
-        allow_permutations (:obj:`bool`): Allow net.Net() to use additional
-            methods if DNS lookups to Cymru fail. *WARNING* deprecated in
-            favor of new argument asn_methods. Defaults to False.

     Raises:
         IPDefinedError: The address provided is defined (does not need to be
             resolved).
     """

-    def __init__(self, address, timeout=5, proxy_opener=None,
-                 allow_permutations=False):
+    def __init__(self, address, timeout=5, proxy_opener=None):

         # IPv4Address or IPv6Address
         if isinstance(address, IPv4Address) or isinstance(
|
@ -129,16 +125,6 @@ class Net:
|
||||||
# Default timeout for socket connections.
|
# Default timeout for socket connections.
|
||||||
self.timeout = timeout
|
self.timeout = timeout
|
||||||
|
|
||||||
# Allow other than DNS lookups for ASNs.
|
|
||||||
self.allow_permutations = allow_permutations
|
|
||||||
|
|
||||||
if self.allow_permutations:
|
|
||||||
|
|
||||||
from warnings import warn
|
|
||||||
warn('allow_permutations has been deprecated and will be removed. '
|
|
||||||
'It is no longer needed, due to the deprecation of asn_alts, '
|
|
||||||
'and the addition of the asn_methods argument.')
|
|
||||||
|
|
||||||
self.dns_resolver = dns.resolver.Resolver()
|
self.dns_resolver = dns.resolver.Resolver()
|
||||||
self.dns_resolver.timeout = timeout
|
self.dns_resolver.timeout = timeout
|
||||||
self.dns_resolver.lifetime = timeout
|
self.dns_resolver.lifetime = timeout
|
||||||
|
@@ -219,21 +205,6 @@ class Net:

             self.dns_zone = IPV6_DNS_ZONE.format(self.reversed)

-    def lookup_asn(self, *args, **kwargs):
-        """
-        Temporary wrapper for IP ASN lookups (moved to
-        asn.IPASN.lookup()). This will be removed in a future
-        release.
-        """
-
-        from warnings import warn
-        warn('Net.lookup_asn() has been deprecated and will be removed. '
-             'You should now use asn.IPASN.lookup() for IP ASN lookups.')
-        from .asn import IPASN
-        response = None
-        ipasn = IPASN(self)
-        return ipasn.lookup(*args, **kwargs), response
-
     def get_asn_dns(self):
         """
         The function for retrieving ASN information for an IP address from
@@ -830,7 +801,7 @@ class Net:

             results = namedtuple('get_host_results', 'hostname, aliaslist, '
                                                      'ipaddrlist')
-            return results(*ret)
+            return results(ret)

         except (socket.timeout, socket.error) as e:

|
@ -1,4 +1,4 @@
|
||||||
# Copyright (c) 2013-2019 Philip Hane
|
# Copyright (c) 2013-2020 Philip Hane
|
||||||
# All rights reserved.
|
# All rights reserved.
|
||||||
#
|
#
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@@ -87,9 +87,9 @@ NIR_WHOIS = {
             'updated': r'(\[Last Update\])[^\S\n]+(?P<val>.*?)\n',
             'nameservers': r'(\[Nameserver\])[^\S\n]+(?P<val>.*?)\n',
             'contact_admin': r'(\[Administrative Contact\])[^\S\n]+.+?\>'
-                             '(?P<val>.+?)\<\/A\>\n',
+                             '(?P<val>.+?)\\<\\/A\\>\n',
             'contact_tech': r'(\[Technical Contact\])[^\S\n]+.+?\>'
-                            '(?P<val>.+?)\<\/A\>\n'
+                            '(?P<val>.+?)\\<\\/A\\>\n'
         },
         'contact_fields': {
             'name': r'(\[Last, First\])[^\S\n]+(?P<val>.*?)\n',
@@ -108,9 +108,14 @@ NIR_WHOIS = {
     },
     'krnic': {
         'country_code': 'KR',
-        'url': 'https://whois.kisa.or.kr/eng/whois.jsc',
+        'url': 'https://xn--c79as89aj0e29b77z.xn--3e0b707e/eng/whois.jsc',
         'request_type': 'POST',
-        'request_headers': {'Accept': 'text/html'},
+        'request_headers': {
+            'Accept': 'text/html',
+            'Referer': (
+                'https://xn--c79as89aj0e29b77z.xn--3e0b707e/eng/whois.jsp'
+            ),
+        },
         'form_data_ip_field': 'query',
         'fields': {
             'name': r'(Organization Name)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
@@ -120,9 +125,9 @@ NIR_WHOIS = {
             'postal_code': r'(Zip Code)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
             'created': r'(Registration Date)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
             'contact_admin': r'(id="eng_isp_contact").+?\>(?P<val>.*?)\<'
-                             '\/div\>\n',
+                             '\\/div\\>\n',
             'contact_tech': r'(id="eng_user_contact").+?\>(?P<val>.*?)\<'
-                            '\/div\>\n'
+                            '\\/div\\>\n'
         },
         'contact_fields': {
             'name': r'(Name)[^\S\n]+?:[^\S\n]+?(?P<val>.*?)\n',
@@ -260,12 +265,20 @@ class NIRWhois:

                 if field in ['created', 'updated'] and dt_format:

-                    value = (
-                        datetime.strptime(
-                            values[0],
-                            str(dt_format)
-                        ) - timedelta(hours=hourdelta)
-                    ).isoformat('T')
+                    try:
+                        value = (
+                            datetime.strptime(
+                                values[0],
+                                str(dt_format)
+                            ) - timedelta(hours=hourdelta)
+                        ).isoformat('T')
+                    except ValueError:
+                        value = (
+                            datetime.strptime(
+                                values[0],
+                                '%Y/%m/%d'
+                            )
+                        ).isoformat('T')

                 elif field in ['nameservers']:

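Note: a standalone sketch of the fallback added above — if a NIR timestamp does not match the configured dt_format, it is re-parsed as a bare '%Y/%m/%d' date. The default format string here is illustrative, not the exact NIR_WHOIS value.

from datetime import datetime, timedelta

def parse_nir_timestamp(raw, dt_format='%Y/%m/%d %H:%M', hourdelta=9):
    try:
        return (datetime.strptime(raw, dt_format)
                - timedelta(hours=hourdelta)).isoformat('T')
    except ValueError:
        # Date-only values fall back to the simpler format.
        return datetime.strptime(raw, '%Y/%m/%d').isoformat('T')

print(parse_nir_timestamp('2020/01/15'))  # -> 2020-01-15T00:00:00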
@@ -286,16 +299,6 @@ class NIRWhois:

         return ret

-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._parse_fields() has been deprecated and will be '
-             'removed. You should now use NIRWhois.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_jpnic(self, response):
         """
         The function for parsing network blocks from jpnic whois data.
@@ -359,16 +362,6 @@ class NIRWhois:

         return nets

-    def _get_nets_jpnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_nets_jpnic() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_nets_jpnic().')
-        return self.get_nets_jpnic(*args, **kwargs)
-
     def get_nets_krnic(self, response):
         """
         The function for parsing network blocks from krnic whois data.
@@ -394,7 +387,7 @@ class NIRWhois:
         # and the start and end positions.
         for match in re.finditer(
                 r'^(IPv4 Address)[\s]+:[^\S\n]+((.+?)[^\S\n]-[^\S\n](.+?)'
-                '[^\S\n]\((.+?)\)|.+)$',
+                '[^\\S\n]\\((.+?)\\)|.+)$',
                 response,
                 re.MULTILINE
         ):
@@ -434,16 +427,6 @@ class NIRWhois:

         return nets

-    def _get_nets_krnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_nets_krnic() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_nets_krnic().')
-        return self.get_nets_krnic(*args, **kwargs)
-
     def get_contact(self, response=None, nir=None, handle=None,
                     retry_count=3, dt_format=None):
         """
@@ -491,16 +474,6 @@ class NIRWhois:
             is_contact=True
         )

-    def _get_contact(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_contact() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_contact().')
-        return self.get_contact(*args, **kwargs)
-
     def lookup(self, nir=None, inc_raw=False, retry_count=3, response=None,
                field_list=None, is_offline=False):
         """
|
@ -1,4 +1,4 @@
|
||||||
# Copyright (c) 2013-2019 Philip Hane
|
# Copyright (c) 2013-2020 Philip Hane
|
||||||
# All rights reserved.
|
# All rights reserved.
|
||||||
#
|
#
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@@ -28,6 +28,7 @@ from .utils import ipv4_lstrip_zeros, calculate_cidr, unique_everseen
 from .net import ip_address
 import logging
 import json
+from collections import namedtuple

 log = logging.getLogger(__name__)

@@ -553,7 +554,7 @@ class _RDAPNetwork(_RDAPCommon):

                 self.vars[v] = self.json[v].strip()

-            except (KeyError, ValueError):
+            except (KeyError, ValueError, AttributeError):

                 pass

@@ -688,9 +689,95 @@ class RDAP:
             raise NetError('The provided net parameter is not an instance of '
                            'ipwhois.net.Net')

+    def _get_entity(self, entity=None, roles=None, inc_raw=False, retry_count=3,
+                    asn_data=None, bootstrap=False, rate_limit_timeout=120):
+        """
+        The function for retrieving and parsing information for an entity via
+        RDAP (HTTP).
+
+        Args:
+            entity (:obj:`str`): The entity name to lookup.
+            roles (:obj:`dict`): The mapping of entity handles to roles.
+            inc_raw (:obj:`bool`, optional): Whether to include the raw
+                results in the returned dictionary. Defaults to False.
+            retry_count (:obj:`int`): The number of times to retry in case
+                socket errors, timeouts, connection resets, etc. are
+                encountered. Defaults to 3.
+            asn_data (:obj:`dict`): Result from
+                :obj:`ipwhois.asn.IPASN.lookup`. Optional if the bootstrap
+                parameter is True.
+            bootstrap (:obj:`bool`): If True, performs lookups via ARIN
+                bootstrap rather than lookups based on ASN data. Defaults to
+                False.
+            rate_limit_timeout (:obj:`int`): The number of seconds to wait
+                before retrying when a rate limit notice is returned via
+                rdap+json. Defaults to 120.
+
+        Returns:
+            namedtuple:
+
+            :result (dict): Consists of the fields listed in the
+                ipwhois.rdap._RDAPEntity dict. The raw result is included for
+                each object if the inc_raw parameter is True.
+            :roles (dict): The mapping of entity handles to roles.
+        """
+
+        result = {}
+
+        if bootstrap:
+            entity_url = '{0}/entity/{1}'.format(
+                BOOTSTRAP_URL, entity)
+        else:
+            tmp_reg = asn_data['asn_registry']
+            entity_url = RIR_RDAP[tmp_reg]['entity_url']
+            entity_url = str(entity_url).format(entity)
+
+        try:
+
+            # RDAP entity query
+            response = self._net.get_http_json(
+                url=entity_url, retry_count=retry_count,
+                rate_limit_timeout=rate_limit_timeout
+            )
+
+            # Parse the entity
+            result_ent = _RDAPEntity(response)
+            result_ent.parse()
+            result = result_ent.vars
+
+            result['roles'] = None
+            try:
+
+                result['roles'] = roles[entity]
+
+            except KeyError:  # pragma: no cover
+
+                pass
+
+            try:
+
+                for tmp in response['entities']:
+
+                    if tmp['handle'] not in roles:
+                        roles[tmp['handle']] = tmp['roles']
+
+            except (IndexError, KeyError):
+
+                pass
+
+            if inc_raw:
+                result['raw'] = response
+
+        except (HTTPLookupError, InvalidEntityObject):
+
+            pass
+
+        return_tuple = namedtuple('return_tuple', ['result', 'roles'])
+        return return_tuple(result, roles)
+
     def lookup(self, inc_raw=False, retry_count=3, asn_data=None, depth=0,
                excluded_entities=None, response=None, bootstrap=False,
-               rate_limit_timeout=120):
+               rate_limit_timeout=120, root_ent_check=True):
         """
         The function for retrieving and parsing information for an IP
         address via RDAP (HTTP).
@@ -716,6 +803,9 @@ class RDAP:
             rate_limit_timeout (:obj:`int`): The number of seconds to wait
                 before retrying when a rate limit notice is returned via
                 rdap+json. Defaults to 120.
+            root_ent_check (:obj:`bool`): If True, will perform
+                additional RDAP HTTP queries for missing entity data at the
+                root level. Defaults to True.

         Returns:
             dict: The IP RDAP lookup results
@@ -792,10 +882,23 @@ class RDAP:
                 if ent['handle'] not in [results['entities'],
                                          excluded_entities]:

-                    result_ent = _RDAPEntity(ent)
-                    result_ent.parse()
+                    if 'vcardArray' not in ent and root_ent_check:
+                        entity_object, roles = self._get_entity(
+                            entity=ent['handle'],
+                            roles=roles,
+                            inc_raw=inc_raw,
+                            retry_count=retry_count,
+                            asn_data=asn_data,
+                            bootstrap=bootstrap,
+                            rate_limit_timeout=rate_limit_timeout
+                        )
+                        results['objects'][ent['handle']] = entity_object

-                    results['objects'][ent['handle']] = result_ent.vars
+                    else:
+                        result_ent = _RDAPEntity(ent)
+                        result_ent.parse()
+
+                        results['objects'][ent['handle']] = result_ent.vars

                     results['entities'].append(ent['handle'])

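Note: a hedged usage sketch (network access required). The new root_ent_check flag controls whether missing root-level entity data triggers the extra per-entity RDAP queries added above; disabling it trades entity completeness for fewer HTTP requests.

from ipwhois import IPWhois

obj = IPWhois('74.125.225.229')
fast = obj.lookup_rdap(depth=1, root_ent_check=False)  # fewer HTTP queries
full = obj.lookup_rdap(depth=1)                        # default: True
print(len(fast['objects']), len(full['objects']))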
@@ -835,57 +938,18 @@ class RDAP:
                                list(new_objects.keys()) +
                                excluded_entities):

-                        if bootstrap:
-                            entity_url = '{0}/entity/{1}'.format(
-                                BOOTSTRAP_URL, ent)
-                        else:
-                            tmp_reg = asn_data['asn_registry']
-                            entity_url = RIR_RDAP[tmp_reg]['entity_url']
-                            entity_url = str(entity_url).format(ent)
-
-                        try:
-
-                            # RDAP entity query
-                            response = self._net.get_http_json(
-                                url=entity_url, retry_count=retry_count,
-                                rate_limit_timeout=rate_limit_timeout
-                            )
-
-                            # Parse the entity
-                            result_ent = _RDAPEntity(response)
-                            result_ent.parse()
-                            new_objects[ent] = result_ent.vars
-
-                            new_objects[ent]['roles'] = None
-                            try:
-
-                                new_objects[ent]['roles'] = roles[ent]
-
-                            except KeyError:  # pragma: no cover
-
-                                pass
-
-                            try:
-
-                                for tmp in response['entities']:
-
-                                    if tmp['handle'] not in roles:
-
-                                        roles[tmp['handle']] = tmp['roles']
-
-                            except (IndexError, KeyError):
-
-                                pass
-
-                            if inc_raw:
-
-                                new_objects[ent]['raw'] = response
-
-                        except (HTTPLookupError, InvalidEntityObject):
-
-                            pass
-
-                        except TypeError:
-
+                        entity_object, roles = self._get_entity(
+                            entity=ent,
+                            roles=roles,
+                            inc_raw=inc_raw,
+                            retry_count=retry_count,
+                            asn_data=asn_data,
+                            bootstrap=bootstrap,
+                            rate_limit_timeout=rate_limit_timeout
+                        )
+                        new_objects[ent] = entity_object
+
+                    except (KeyError, TypeError):
+
                             pass

@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -166,17 +166,6 @@ group.add_argument(
     help='The number of times to retry in case socket errors, timeouts, '
          'connection resets, etc. are encountered.'
 )
-group.add_argument(
-    '--asn_alts',
-    type=str,
-    nargs=1,
-    default='whois,http',
-    metavar='"ASN_ALTS"',
-    help='A comma delimited list of additional lookup types to attempt if the '
-         'ASN dns lookup fails. Allow permutations must be enabled. '
-         'Defaults to all: "whois,http" *WARNING* deprecated in '
-         'favor of new argument asn_methods.'
-)
 group.add_argument(
     '--asn_methods',
     type=str,
@@ -1456,9 +1445,6 @@ if script_args.addr:
             field_list=script_args.field_list.split(',') if (
                 script_args.field_list and
                 len(script_args.field_list) > 0) else None,
-            asn_alts=script_args.asn_alts.split(',') if (
-                script_args.asn_alts and not script_args.asn_methods and
-                len(script_args.asn_alts) > 0) else None,
             extra_org_map=script_args.extra_org_map,
             inc_nir=(not script_args.exclude_nir),
             nir_field_list=script_args.nir_field_list.split(',') if (
@@ -1484,9 +1470,6 @@ if script_args.addr:
                 len(script_args.excluded_entities) > 0) else None,
             bootstrap=script_args.bootstrap,
             rate_limit_timeout=script_args.rate_limit_timeout,
-            asn_alts=script_args.asn_alts.split(',') if (
-                script_args.asn_alts and not script_args.asn_methods and
-                len(script_args.asn_alts) > 0) else None,
             extra_org_map=script_args.extra_org_map,
             inc_nir=(not script_args.exclude_nir),
             nir_field_list=script_args.nir_field_list.split(',') if (
|
@ -1,4 +1,4 @@
|
||||||
# Copyright (c) 2013-2019 Philip Hane
|
# Copyright (c) 2013-2020 Philip Hane
|
||||||
# All rights reserved.
|
# All rights reserved.
|
||||||
#
|
#
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@@ -28,8 +28,9 @@ import argparse
 from collections import OrderedDict
 import json
 from ipwhois.utils import (ipv4_lstrip_zeros, calculate_cidr, get_countries,
-                           ipv4_is_defined, ipv6_is_defined, unique_everseen,
-                           unique_addresses)
+                           ipv4_is_defined, ipv6_is_defined,
+                           ipv4_generate_random, ipv6_generate_random,
+                           unique_everseen, unique_addresses)

 # CLI ANSI rendering
 ANSI = {
@@ -86,6 +87,22 @@ parser.add_argument(
     metavar='"IP ADDRESS"',
     help='Check if an IPv6 address is defined (in a reserved address range).'
 )
+parser.add_argument(
+    '--ipv4_generate_random',
+    type=int,
+    nargs=1,
+    metavar='TOTAL',
+    help='Generate random, unique IPv4 addresses that are not defined (can be '
+         'looked up using ipwhois).'
+)
+parser.add_argument(
+    '--ipv6_generate_random',
+    type=int,
+    nargs=1,
+    metavar='TOTAL',
+    help='Generate random, unique IPv6 addresses that are not defined (can be '
+         'looked up using ipwhois).'
+)
 parser.add_argument(
     '--unique_everseen',
     type=json.loads,
@@ -224,6 +241,34 @@ elif script_args.ipv6_is_defined:

         print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))

+elif script_args.ipv4_generate_random:
+
+    try:
+
+        result = ipv4_generate_random(total=script_args.ipv4_generate_random[0])
+
+        for random_ip in result:
+
+            print(random_ip)
+
+    except Exception as e:
+
+        print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))
+
+elif script_args.ipv6_generate_random:
+
+    try:
+
+        result = ipv6_generate_random(total=script_args.ipv6_generate_random[0])
+
+        for random_ip in result:
+
+            print(random_ip)
+
+    except Exception as e:
+
+        print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))
+
 elif script_args.unique_everseen:

     try:
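Note: a library-level sketch of the helpers behind the new CLI options above (no network access is needed to generate the addresses themselves).

from ipwhois.utils import ipv4_generate_random, ipv6_generate_random

for ip in ipv4_generate_random(total=3):
    print(ip)
for ip in ipv6_generate_random(total=3):
    print(ip)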
|
@ -1,4 +1,4 @@
|
||||||
# Copyright (c) 2013-2019 Philip Hane
|
# Copyright (c) 2013-2020 Philip Hane
|
||||||
# All rights reserved.
|
# All rights reserved.
|
||||||
#
|
#
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@@ -87,30 +87,30 @@ IETF_RFC_REFERENCES = {
 IP_REGEX = (
     r'(?P<ip>'
     # IPv4
-    '(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.)){3}'
-    '(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
+    r'(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.)){3}'
+    r'(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
     # IPv6
-    '|\[?(((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:)'
-    '{6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
-    '2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]'
-    '{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d'
-    '\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|'
-    '((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
-    '2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]'
-    '{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)'
-    '(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(('
-    '(:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1'
-    '\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(('
-    '[0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4})'
-    '{0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]'
-    '?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:(('
-    '25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})'
-    ')|:)))(%.+)?))\]?'
+    r'|\[?(((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:)'
+    r'{6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
+    r'2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]'
+    r'{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d'
+    r'\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|'
+    r'((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
+    r'2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]'
+    r'{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)'
+    r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(('
+    r'(:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1'
+    r'\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(('
+    r'[0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4})'
+    r'{0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]'
+    r'?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:(('
+    r'25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})'
+    r')|:)))(%.+)?))\]?'
     # Optional IPv4 Port
-    '((:(6553[0-5]|655[0-2]\d|65[0-4]\d{2}|6[0-4]\d{3}|[1-5]\d{4}|[1-9]\d{0,3}'
+    r'((:(6553[0-5]|655[0-2]\d|65[0-4]\d{2}|6[0-4]\d{3}|[1-5]\d{4}|[1-9]\d{0,3}'
     # Optional CIDR block
-    '))|(\/(?:[012]\d?|3[012]?|[4-9])))?'
-    ')'
+    r'))|(\/(?:[012]\d?|3[012]?|[4-9])))?'
+    r')'
 )

@@ -212,6 +212,7 @@ def get_countries(is_legacy_xml=False):

             # Read the file.
             data = f.read()
+            f.close()

             # Check if there is data.
             if not data:  # pragma: no cover
@@ -258,6 +259,8 @@ def get_countries(is_legacy_xml=False):
             # Add to the countries dictionary.
             countries[code] = name

+    f.close()
+
     return countries


@@ -506,6 +509,7 @@ def unique_addresses(data=None, file_path=None):

             # Read the file.
             file_data = f.read()
+            f.close()

     pattern = re.compile(
         str(IP_REGEX),
|
@ -1,4 +1,4 @@
|
||||||
# Copyright (c) 2013-2019 Philip Hane
|
# Copyright (c) 2013-2020 Philip Hane
|
||||||
# All rights reserved.
|
# All rights reserved.
|
||||||
#
|
#
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@@ -67,7 +67,7 @@ RIR_WHOIS = {
             'name': r'(NetName):[^\S\n]+(?P<val>.+?)\n',
             'handle': r'(NetHandle):[^\S\n]+(?P<val>.+?)\n',
             'description': r'(OrgName|CustName):[^\S\n]+(?P<val>.+?)'
-                           '(?=(\n\S):?)',
+                           '(?=(\n\\S):?)',
             'country': r'(Country):[^\S\n]+(?P<val>.+?)\n',
             'state': r'(StateProv):[^\S\n]+(?P<val>.+?)\n',
             'city': r'(City):[^\S\n]+(?P<val>.+?)\n',
@@ -75,7 +75,7 @@ RIR_WHOIS = {
             'postal_code': r'(PostalCode):[^\S\n]+(?P<val>.+?)\n',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'created': r'(RegDate):[^\S\n]+(?P<val>.+?)\n',
             'updated': r'(Updated):[^\S\n]+(?P<val>.+?)\n',
@@ -92,7 +92,7 @@ RIR_WHOIS = {
             'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'created': (
                 r'(created):[^\S\n]+(?P<val>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]'
@@ -115,7 +115,7 @@ RIR_WHOIS = {
             'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'updated': r'(changed):[^\S\n]+.*(?P<val>[0-9]{8}).*?\n'
         },
@@ -129,7 +129,7 @@ RIR_WHOIS = {
             'country': r'(country):[^\S\n]+(?P<val>.+?)\n',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'created': r'(created):[^\S\n]+(?P<val>[0-9]{8}).*?\n',
             'updated': r'(changed):[^\S\n]+(?P<val>[0-9]{8}).*?\n'
@@ -146,7 +146,7 @@ RIR_WHOIS = {
             'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
         }
 }
@@ -166,7 +166,7 @@ RWHOIS = {
         'postal_code': r'(network:Postal-Code):(?P<val>.+?)\n',
         'emails': (
             r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-            '[^\S\n]+.*?)*?\n'
+            '[^\\S\n]+.*?)*?\n'
         ),
         'created': r'(network:Created):(?P<val>.+?)\n',
         'updated': r'(network:Updated):(?P<val>.+?)\n'
@@ -324,16 +324,6 @@ class Whois:

         return ret

-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._parse_fields() has been deprecated and will be '
-             'removed. You should now use Whois.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_arin(self, response):
         """
         The function for parsing network blocks from ARIN whois data.
@@ -415,16 +405,6 @@ class Whois:

         return nets

-    def _get_nets_arin(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_arin() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_arin().')
-        return self.get_nets_arin(*args, **kwargs)
-
     def get_nets_lacnic(self, response):
         """
         The function for parsing network blocks from LACNIC whois data.
@@ -495,16 +475,6 @@ class Whois:

         return nets

-    def _get_nets_lacnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_lacnic() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_lacnic().')
-        return self.get_nets_lacnic(*args, **kwargs)
-
     def get_nets_other(self, response):
         """
         The function for parsing network blocks from generic whois data.
@@ -577,16 +547,6 @@ class Whois:

         return nets

-    def _get_nets_other(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_other() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_other().')
-        return self.get_nets_other(*args, **kwargs)
-
     def lookup(self, inc_raw=False, retry_count=3, response=None,
                get_referral=False, extra_blacklist=None,
                ignore_referral_errors=False, asn_data=None,