Mirror of https://github.com/Tautulli/Tautulli.git, synced 2025-07-06 05:01:14 -07:00

Commit 2c4cc34b2b (parent 4d62245cf5): Update ipwhois-1.2.0

13 changed files with 289 additions and 368 deletions

@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -26,4 +26,4 @@ from .exceptions import *
 from .net import Net
 from .ipwhois import IPWhois
 
-__version__ = '1.1.0'
+__version__ = '1.2.0'

@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -61,21 +61,21 @@ ASN_ORIGIN_WHOIS = {
 
 ASN_ORIGIN_HTTP = {
     'radb': {
-        'url': 'http://www.radb.net/query/',
+        'url': 'http://www.radb.net/query',
         'form_data_asn_field': 'keywords',
         'form_data': {
             'advanced_query': '1',
             'query': 'Query',
-            '-T option': 'inet-rtr',
+            # '-T option': 'inet-rtr',
             'ip_option': '',
             '-i': '1',
             '-i option': 'origin'
         },
         'fields': {
-            'description': r'(descr):[^\S\n]+(?P<val>.+?)\<br\>',
-            'maintainer': r'(mnt-by):[^\S\n]+(?P<val>.+?)\<br\>',
-            'updated': r'(changed):[^\S\n]+(?P<val>.+?)\<br\>',
-            'source': r'(source):[^\S\n]+(?P<val>.+?)\<br\>',
+            'description': r'(descr):[^\S\n]+(?P<val>.+?)\n',
+            'maintainer': r'(mnt-by):[^\S\n]+(?P<val>.+?)\n',
+            'updated': r'(changed):[^\S\n]+(?P<val>.+?)\n',
+            'source': r'(source):[^\S\n]+(?P<val>.+?)\<',
         }
     },
 }
@@ -169,16 +169,6 @@ class IPASN:
 
         return ret
 
-    def _parse_fields_dns(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_dns() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_dns().')
-        return self.parse_fields_dns(*args, **kwargs)
-
     def parse_fields_verbose_dns(self, response):
         """
         The function for parsing ASN fields from a verbose dns response.
@@ -293,16 +283,6 @@ class IPASN:
 
         return ret
 
-    def _parse_fields_whois(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_whois() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_whois().')
-        return self.parse_fields_whois(*args, **kwargs)
-
     def parse_fields_http(self, response, extra_org_map=None):
         """
         The function for parsing ASN fields from a http response.
@@ -403,19 +383,8 @@ class IPASN:
 
         return asn_data
 
-    def _parse_fields_http(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('IPASN._parse_fields_http() has been deprecated and will be '
-             'removed. You should now use IPASN.parse_fields_http().')
-        return self.parse_fields_http(*args, **kwargs)
-
-    def lookup(self, inc_raw=False, retry_count=3, asn_alts=None,
-               extra_org_map=None, asn_methods=None,
-               get_asn_description=True):
+    def lookup(self, inc_raw=False, retry_count=3, extra_org_map=None,
+               asn_methods=None, get_asn_description=True):
         """
         The wrapper function for retrieving and parsing ASN information for an
         IP address.
@@ -426,10 +395,6 @@ class IPASN:
             retry_count (:obj:`int`): The number of times to retry in case
                 socket errors, timeouts, connection resets, etc. are
                 encountered. Defaults to 3.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN dns lookup fails. Allow permutations must be enabled.
-                Defaults to all ['whois', 'http']. *WARNING* deprecated in
-                favor of new argument asn_methods. Defaults to None.
             extra_org_map (:obj:`dict`): Mapping org handles to RIRs. This is
                 for limited cases where ARIN REST (ASN fallback HTTP lookup)
                 does not show an RIR as the org handle e.g., DNIC (which is
@@ -466,18 +431,8 @@
 
         if asn_methods is None:
 
-            if asn_alts is None:
-
-                lookups = ['dns', 'whois', 'http']
-
-            else:
-
-                from warnings import warn
-                warn('IPASN.lookup() asn_alts argument has been deprecated '
-                     'and will be removed. You should now use the asn_methods '
-                     'argument.')
-                lookups = ['dns'] + asn_alts
+            lookups = ['dns', 'whois', 'http']
 
         else:
 
             if {'dns', 'whois', 'http'}.isdisjoint(asn_methods):
@@ -492,12 +447,6 @@
         dns_success = False
         for index, lookup_method in enumerate(lookups):
 
-            if index > 0 and not asn_methods and not (
-                    self._net.allow_permutations):
-
-                raise ASNRegistryError('ASN registry lookup failed. '
-                                       'Permutations not allowed.')
-
             if lookup_method == 'dns':
 
                 try:
@@ -706,16 +655,6 @@ class ASNOrigin:
 
         return ret
 
-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('ASNOrigin._parse_fields() has been deprecated and will be '
-             'removed. You should now use ASNOrigin.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_radb(self, response, is_http=False):
         """
         The function for parsing network blocks from ASN origin data.
@@ -743,7 +682,7 @@ class ASNOrigin:
         nets = []
 
         if is_http:
-            regex = r'route(?:6)?:[^\S\n]+(?P<val>.+?)<br>'
+            regex = r'route(?:6)?:[^\S\n]+(?P<val>.+?)\n'
         else:
             regex = r'^route(?:6)?:[^\S\n]+(?P<val>.+|.+)$'
 
@@ -769,18 +708,8 @@
 
         return nets
 
-    def _get_nets_radb(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('ASNOrigin._get_nets_radb() has been deprecated and will be '
-             'removed. You should now use ASNOrigin.get_nets_radb().')
-        return self.get_nets_radb(*args, **kwargs)
-
     def lookup(self, asn=None, inc_raw=False, retry_count=3, response=None,
-               field_list=None, asn_alts=None, asn_methods=None):
+               field_list=None, asn_methods=None):
         """
         The function for retrieving and parsing ASN origin whois information
         via port 43/tcp (WHOIS).
@@ -797,9 +726,6 @@
             field_list (:obj:`list`): If provided, fields to parse:
                 ['description', 'maintainer', 'updated', 'source']
                 If None, defaults to all.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN whois lookup fails. If None, defaults to all ['http'].
-                *WARNING* deprecated in favor of new argument asn_methods.
             asn_methods (:obj:`list`): ASN lookup types to attempt, in order.
                 If None, defaults to all ['whois', 'http'].
 
@@ -828,18 +754,8 @@
 
         if asn_methods is None:
 
-            if asn_alts is None:
-
-                lookups = ['whois', 'http']
-
-            else:
-
-                from warnings import warn
-                warn('ASNOrigin.lookup() asn_alts argument has been deprecated'
-                     ' and will be removed. You should now use the asn_methods'
-                     ' argument.')
-                lookups = ['whois'] + asn_alts
+            lookups = ['whois', 'http']
 
         else:
 
             if {'whois', 'http'}.isdisjoint(asn_methods):
@@ -875,6 +791,8 @@
                         asn=asn, retry_count=retry_count
                     )
 
+                    break
+
                 except (WhoisLookupError, WhoisRateLimitError) as e:
 
                     log.debug('ASN origin WHOIS lookup failed: {0}'
@@ -888,17 +806,22 @@
                     log.debug('Response not given, perform ASN origin '
                               'HTTP lookup for: {0}'.format(asn))
 
-                    tmp = ASN_ORIGIN_HTTP['radb']['form_data']
-                    tmp[str(ASN_ORIGIN_HTTP['radb']['form_data_asn_field']
-                            )] = asn
+                    # tmp = ASN_ORIGIN_HTTP['radb']['form_data']
+                    # tmp[str(
+                    #     ASN_ORIGIN_HTTP['radb']['form_data_asn_field']
+                    # )] = asn
                     response = self._net.get_http_raw(
-                        url=ASN_ORIGIN_HTTP['radb']['url'],
+                        url=('{0}?advanced_query=1&keywords={1}&-T+option'
+                             '=&ip_option=&-i=1&-i+option=origin'
+                             ).format(ASN_ORIGIN_HTTP['radb']['url'], asn),
                         retry_count=retry_count,
-                        request_type='POST',
-                        form_data=tmp
+                        request_type='GET',
+                        # form_data=tmp
                     )
                     is_http = True  # pragma: no cover
 
                     break
 
                 except HTTPLookupError as e:
 
                     log.debug('ASN origin HTTP lookup failed: {0}'

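The RADB HTTP fallback above now issues a plain GET with a query string instead of POSTing form data, and the deprecated asn_alts argument is gone from ASNOrigin.lookup(). A minimal usage sketch against the 1.2.0 API (not part of the diff; the ASN and address below are placeholders):

    from ipwhois.net import Net
    from ipwhois.asn import ASNOrigin

    # Net still requires an IP address even though the origin lookup is keyed on the ASN.
    net = Net('2.2.2.2')
    asn_origin = ASNOrigin(net)

    # asn_methods replaces the removed asn_alts; the list order is the attempt order.
    results = asn_origin.lookup(asn='AS15169', asn_methods=['whois', 'http'])
    print(results['nets'])
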
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -158,11 +158,14 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
             'ip_lookup_total' (int) - The total number of addresses that
                 lookups were attempted for, excluding any that failed ASN
                 registry checks.
+            'ip_failed_total' (int) - The total number of addresses that
+                lookups failed for. Excludes any that failed initially, but
+                succeeded after further retries.
             'lacnic' (dict) -
             {
                 'failed' (list) - The addresses that failed to lookup.
                     Excludes any that failed initially, but succeeded after
-                    futher retries.
+                    further retries.
                 'rate_limited' (list) - The addresses that encountered
                     rate-limiting. Unless an address is also in 'failed',
                     it eventually succeeded.
@@ -196,6 +199,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
         'ip_input_total': len(addresses),
         'ip_unique_total': 0,
         'ip_lookup_total': 0,
+        'ip_failed_total': 0,
         'lacnic': {'failed': [], 'rate_limited': [], 'total': 0},
         'ripencc': {'failed': [], 'rate_limited': [], 'total': 0},
         'apnic': {'failed': [], 'rate_limited': [], 'total': 0},
@@ -253,15 +257,15 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
 
         try:
 
-            results = ipasn.parse_fields_whois(asn_result)
+            asn_parsed = ipasn.parse_fields_whois(asn_result)
 
         except ASNRegistryError:  # pragma: no cover
 
            continue
 
         # Add valid IP ASN result to asn_parsed_results for RDAP lookup
-        asn_parsed_results[ip] = results
-        stats[results['asn_registry']]['total'] += 1
+        asn_parsed_results[ip] = asn_parsed
+        stats[asn_parsed['asn_registry']]['total'] += 1
 
     # Set the list of IPs that are not allocated/failed ASN lookup
     stats['unallocated_addresses'] = list(k for k in addresses if k not in
@@ -362,7 +366,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
 
                 # Perform the RDAP lookup. retry_count is set to 0
                 # here since we handle that in this function
-                results = rdap.lookup(
+                rdap_result = rdap.lookup(
                     inc_raw=inc_raw, retry_count=0, asn_data=asn_data,
                     depth=depth, excluded_entities=excluded_entities
                 )
@@ -373,7 +377,9 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
                 # Lookup was successful, add to result. Set the nir
                 # key to None as this is not supported
                 # (yet - requires more queries)
-                results[ip] = results
+                results[ip] = asn_data
+                results[ip].update(rdap_result)
+
                 results[ip]['nir'] = None
 
                 # Remove the IP from the lookup queue
@@ -423,6 +429,7 @@ def bulk_lookup_rdap(addresses=None, inc_raw=False, retry_count=3, depth=0,
 
                     del asn_parsed_results[ip]
                     stats[rir]['failed'].append(ip)
+                    stats['ip_failed_total'] += 1
 
                     if rir == 'lacnic':
 

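With the new 'ip_failed_total' counter added to the stats dictionary, callers of the experimental bulk RDAP lookup can read the failure total directly. A hedged sketch (not part of the diff; the addresses are placeholders):

    from ipwhois.experimental import bulk_lookup_rdap

    addresses = ['74.125.225.229', '2001:4860:4860::8888']
    # bulk_lookup_rdap returns (results, stats); stats now includes ip_failed_total.
    results, stats = bulk_lookup_rdap(addresses=addresses, depth=1)
    print(stats['ip_lookup_total'], stats['ip_failed_total'])
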
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without

@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -42,17 +42,12 @@ class IPWhois:
             seconds. Defaults to 5.
         proxy_opener (:obj:`urllib.request.OpenerDirector`): The request for
             proxy support. Defaults to None.
-        allow_permutations (:obj:`bool`): Allow net.Net() to use additional
-            methods if DNS lookups to Cymru fail. *WARNING* deprecated in
-            favor of new argument asn_methods. Defaults to False.
     """
 
-    def __init__(self, address, timeout=5, proxy_opener=None,
-                 allow_permutations=False):
+    def __init__(self, address, timeout=5, proxy_opener=None):
 
         self.net = Net(
-            address=address, timeout=timeout, proxy_opener=proxy_opener,
-            allow_permutations=allow_permutations
+            address=address, timeout=timeout, proxy_opener=proxy_opener
         )
         self.ipasn = IPASN(self.net)
 
@@ -71,7 +66,7 @@ class IPWhois:
 
     def lookup_whois(self, inc_raw=False, retry_count=3, get_referral=False,
                      extra_blacklist=None, ignore_referral_errors=False,
-                     field_list=None, asn_alts=None, extra_org_map=None,
+                     field_list=None, extra_org_map=None,
                      inc_nir=True, nir_field_list=None, asn_methods=None,
                      get_asn_description=True):
         """
@@ -95,10 +90,6 @@ class IPWhois:
                 ['name', 'handle', 'description', 'country', 'state', 'city',
                 'address', 'postal_code', 'emails', 'created', 'updated']
                 If None, defaults to all.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN dns lookup fails. Allow permutations must be enabled.
-                If None, defaults to all ['whois', 'http']. *WARNING*
-                deprecated in favor of new argument asn_methods.
             extra_org_map (:obj:`dict`): Dictionary mapping org handles to
                 RIRs. This is for limited cases where ARIN REST (ASN fallback
                 HTTP lookup) does not show an RIR as the org handle e.g., DNIC
@@ -161,7 +152,7 @@ class IPWhois:
         log.debug('ASN lookup for {0}'.format(self.address_str))
 
         asn_data = self.ipasn.lookup(
-            inc_raw=inc_raw, retry_count=retry_count, asn_alts=asn_alts,
+            inc_raw=inc_raw, retry_count=retry_count,
             extra_org_map=extra_org_map, asn_methods=asn_methods,
             get_asn_description=get_asn_description
         )
@@ -206,9 +197,9 @@ class IPWhois:
 
     def lookup_rdap(self, inc_raw=False, retry_count=3, depth=0,
                     excluded_entities=None, bootstrap=False,
-                    rate_limit_timeout=120, asn_alts=None, extra_org_map=None,
+                    rate_limit_timeout=120, extra_org_map=None,
                     inc_nir=True, nir_field_list=None, asn_methods=None,
-                    get_asn_description=True):
+                    get_asn_description=True, root_ent_check=True):
         """
         The function for retrieving and parsing whois information for an IP
         address via HTTP (RDAP).
@@ -233,10 +224,6 @@ class IPWhois:
             rate_limit_timeout (:obj:`int`): The number of seconds to wait
                 before retrying when a rate limit notice is returned via
                 rdap+json. Defaults to 120.
-            asn_alts (:obj:`list`): Additional lookup types to attempt if the
-                ASN dns lookup fails. Allow permutations must be enabled.
-                If None, defaults to all ['whois', 'http']. *WARNING*
-                deprecated in favor of new argument asn_methods.
             extra_org_map (:obj:`dict`): Dictionary mapping org handles to
                 RIRs. This is for limited cases where ARIN REST (ASN fallback
                 HTTP lookup) does not show an RIR as the org handle e.g., DNIC
@@ -260,6 +247,9 @@ class IPWhois:
             get_asn_description (:obj:`bool`): Whether to run an additional
                 query when pulling ASN information via dns, in order to get
                 the ASN description. Defaults to True.
+            root_ent_check (:obj:`bool`): If True, will perform
+                additional RDAP HTTP queries for missing entity data at the
+                root level. Defaults to True.
 
         Returns:
             dict: The IP RDAP lookup results
@@ -303,7 +293,7 @@ class IPWhois:
         # Retrieve the ASN information.
         log.debug('ASN lookup for {0}'.format(self.address_str))
         asn_data = self.ipasn.lookup(
-            inc_raw=inc_raw, retry_count=retry_count, asn_alts=asn_alts,
+            inc_raw=inc_raw, retry_count=retry_count,
            extra_org_map=extra_org_map, asn_methods=asn_methods,
            get_asn_description=get_asn_description
        )
@@ -318,7 +308,8 @@ class IPWhois:
             inc_raw=inc_raw, retry_count=retry_count, asn_data=asn_data,
             depth=depth, excluded_entities=excluded_entities,
             response=response, bootstrap=bootstrap,
-            rate_limit_timeout=rate_limit_timeout
+            rate_limit_timeout=rate_limit_timeout,
+            root_ent_check=root_ent_check
         )
 
         # Add the RDAP information to the return dictionary.

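IPWhois.lookup_rdap() gains root_ent_check and drops asn_alts, so a call against 1.2.0 looks roughly like the sketch below (not part of the diff; the address is a placeholder):

    from ipwhois import IPWhois

    obj = IPWhois('74.125.225.229')
    # asn_methods replaces asn_alts; root_ent_check=False skips the extra
    # per-entity RDAP queries for missing root-level entity data.
    results = obj.lookup_rdap(depth=1, asn_methods=['dns', 'whois', 'http'],
                              root_ent_check=False)
    print(results['asn'], list(results['objects'].keys()))
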
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -103,17 +103,13 @@ class Net:
             seconds. Defaults to 5.
         proxy_opener (:obj:`urllib.request.OpenerDirector`): The request for
             proxy support. Defaults to None.
-        allow_permutations (:obj:`bool`): Allow net.Net() to use additional
-            methods if DNS lookups to Cymru fail. *WARNING* deprecated in
-            favor of new argument asn_methods. Defaults to False.
 
     Raises:
         IPDefinedError: The address provided is defined (does not need to be
             resolved).
     """
 
-    def __init__(self, address, timeout=5, proxy_opener=None,
-                 allow_permutations=False):
+    def __init__(self, address, timeout=5, proxy_opener=None):
 
         # IPv4Address or IPv6Address
         if isinstance(address, IPv4Address) or isinstance(
@@ -129,16 +125,6 @@ class Net:
         # Default timeout for socket connections.
         self.timeout = timeout
 
-        # Allow other than DNS lookups for ASNs.
-        self.allow_permutations = allow_permutations
-
-        if self.allow_permutations:
-
-            from warnings import warn
-            warn('allow_permutations has been deprecated and will be removed. '
-                 'It is no longer needed, due to the deprecation of asn_alts, '
-                 'and the addition of the asn_methods argument.')
-
         self.dns_resolver = dns.resolver.Resolver()
         self.dns_resolver.timeout = timeout
         self.dns_resolver.lifetime = timeout
@@ -219,21 +205,6 @@ class Net:
 
             self.dns_zone = IPV6_DNS_ZONE.format(self.reversed)
 
-    def lookup_asn(self, *args, **kwargs):
-        """
-        Temporary wrapper for IP ASN lookups (moved to
-        asn.IPASN.lookup()). This will be removed in a future
-        release.
-        """
-
-        from warnings import warn
-        warn('Net.lookup_asn() has been deprecated and will be removed. '
-             'You should now use asn.IPASN.lookup() for IP ASN lookups.')
-        from .asn import IPASN
-        response = None
-        ipasn = IPASN(self)
-        return ipasn.lookup(*args, **kwargs), response
-
     def get_asn_dns(self):
         """
         The function for retrieving ASN information for an IP address from
@@ -830,7 +801,7 @@ class Net:
 
             results = namedtuple('get_host_results', 'hostname, aliaslist, '
                                                      'ipaddrlist')
-            return results(*ret)
+            return results(ret)
 
         except (socket.timeout, socket.error) as e:
 

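Since Net() no longer accepts allow_permutations and the temporary Net.lookup_asn() wrapper is removed, ASN lookups go through asn.IPASN directly. A minimal sketch (not part of the diff; the address is a placeholder):

    from ipwhois.net import Net
    from ipwhois.asn import IPASN

    net = Net('74.125.225.229')
    ipasn = IPASN(net)
    # asn_methods controls the lookup order; no permutation flag is needed.
    asn_data = ipasn.lookup(asn_methods=['dns', 'whois', 'http'])
    print(asn_data['asn'], asn_data['asn_registry'])
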
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -87,9 +87,9 @@ NIR_WHOIS = {
             'updated': r'(\[Last Update\])[^\S\n]+(?P<val>.*?)\n',
             'nameservers': r'(\[Nameserver\])[^\S\n]+(?P<val>.*?)\n',
             'contact_admin': r'(\[Administrative Contact\])[^\S\n]+.+?\>'
-                             '(?P<val>.+?)\<\/A\>\n',
+                             '(?P<val>.+?)\\<\\/A\\>\n',
             'contact_tech': r'(\[Technical Contact\])[^\S\n]+.+?\>'
-                            '(?P<val>.+?)\<\/A\>\n'
+                            '(?P<val>.+?)\\<\\/A\\>\n'
         },
         'contact_fields': {
             'name': r'(\[Last, First\])[^\S\n]+(?P<val>.*?)\n',
@@ -108,9 +108,14 @@ NIR_WHOIS = {
         },
     'krnic': {
         'country_code': 'KR',
-        'url': 'https://whois.kisa.or.kr/eng/whois.jsc',
+        'url': 'https://xn--c79as89aj0e29b77z.xn--3e0b707e/eng/whois.jsc',
         'request_type': 'POST',
-        'request_headers': {'Accept': 'text/html'},
+        'request_headers': {
+            'Accept': 'text/html',
+            'Referer': (
+                'https://xn--c79as89aj0e29b77z.xn--3e0b707e/eng/whois.jsp'
+            ),
+        },
         'form_data_ip_field': 'query',
         'fields': {
             'name': r'(Organization Name)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
@@ -120,9 +125,9 @@ NIR_WHOIS = {
             'postal_code': r'(Zip Code)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
             'created': r'(Registration Date)[\s]+\:[^\S\n]+(?P<val>.+?)\n',
             'contact_admin': r'(id="eng_isp_contact").+?\>(?P<val>.*?)\<'
-                             '\/div\>\n',
+                             '\\/div\\>\n',
             'contact_tech': r'(id="eng_user_contact").+?\>(?P<val>.*?)\<'
-                            '\/div\>\n'
+                            '\\/div\\>\n'
         },
         'contact_fields': {
             'name': r'(Name)[^\S\n]+?:[^\S\n]+?(?P<val>.*?)\n',
@@ -260,12 +265,20 @@ class NIRWhois:
 
                 if field in ['created', 'updated'] and dt_format:
 
-                    value = (
-                        datetime.strptime(
-                            values[0],
-                            str(dt_format)
-                        ) - timedelta(hours=hourdelta)
-                    ).isoformat('T')
+                    try:
+                        value = (
+                            datetime.strptime(
+                                values[0],
+                                str(dt_format)
+                            ) - timedelta(hours=hourdelta)
+                        ).isoformat('T')
+                    except ValueError:
+                        value = (
+                            datetime.strptime(
+                                values[0],
+                                '%Y/%m/%d'
+                            )
+                        ).isoformat('T')
 
                 elif field in ['nameservers']:
 
@@ -286,16 +299,6 @@ class NIRWhois:
 
         return ret
 
-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._parse_fields() has been deprecated and will be '
-             'removed. You should now use NIRWhois.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_jpnic(self, response):
         """
         The function for parsing network blocks from jpnic whois data.
@@ -359,16 +362,6 @@ class NIRWhois:
 
         return nets
 
-    def _get_nets_jpnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_nets_jpnic() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_nets_jpnic().')
-        return self.get_nets_jpnic(*args, **kwargs)
-
     def get_nets_krnic(self, response):
         """
         The function for parsing network blocks from krnic whois data.
@@ -394,7 +387,7 @@ class NIRWhois:
         # and the start and end positions.
         for match in re.finditer(
             r'^(IPv4 Address)[\s]+:[^\S\n]+((.+?)[^\S\n]-[^\S\n](.+?)'
-            '[^\S\n]\((.+?)\)|.+)$',
+            '[^\\S\n]\\((.+?)\\)|.+)$',
             response,
             re.MULTILINE
         ):
@@ -434,16 +427,6 @@ class NIRWhois:
 
         return nets
 
-    def _get_nets_krnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_nets_krnic() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_nets_krnic().')
-        return self.get_nets_krnic(*args, **kwargs)
-
     def get_contact(self, response=None, nir=None, handle=None,
                     retry_count=3, dt_format=None):
         """
@@ -491,16 +474,6 @@ class NIRWhois:
             is_contact=True
         )
 
-    def _get_contact(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('NIRWhois._get_contact() has been deprecated and will be '
-             'removed. You should now use NIRWhois.get_contact().')
-        return self.get_contact(*args, **kwargs)
-
     def lookup(self, nir=None, inc_raw=False, retry_count=3, response=None,
                field_list=None, is_offline=False):
         """

@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -28,6 +28,7 @@ from .utils import ipv4_lstrip_zeros, calculate_cidr, unique_everseen
 from .net import ip_address
 import logging
 import json
+from collections import namedtuple
 
 log = logging.getLogger(__name__)
 
@@ -553,7 +554,7 @@ class _RDAPNetwork(_RDAPCommon):
 
                     self.vars[v] = self.json[v].strip()
 
-                except (KeyError, ValueError):
+                except (KeyError, ValueError, AttributeError):
 
                     pass
 
@@ -688,9 +689,95 @@ class RDAP:
         raise NetError('The provided net parameter is not an instance of '
                        'ipwhois.net.Net')
 
+    def _get_entity(self, entity=None, roles=None, inc_raw=False, retry_count=3,
+                    asn_data=None, bootstrap=False, rate_limit_timeout=120):
+        """
+        The function for retrieving and parsing information for an entity via
+        RDAP (HTTP).
+
+        Args:
+            entity (:obj:`str`): The entity name to lookup.
+            roles (:obj:`dict`): The mapping of entity handles to roles.
+            inc_raw (:obj:`bool`, optional): Whether to include the raw
+                results in the returned dictionary. Defaults to False.
+            retry_count (:obj:`int`): The number of times to retry in case
+                socket errors, timeouts, connection resets, etc. are
+                encountered. Defaults to 3.
+            asn_data (:obj:`dict`): Result from
+                :obj:`ipwhois.asn.IPASN.lookup`. Optional if the bootstrap
+                parameter is True.
+            bootstrap (:obj:`bool`): If True, performs lookups via ARIN
+                bootstrap rather than lookups based on ASN data. Defaults to
+                False.
+            rate_limit_timeout (:obj:`int`): The number of seconds to wait
+                before retrying when a rate limit notice is returned via
+                rdap+json. Defaults to 120.
+
+        Returns:
+            namedtuple:
+
+            :result (dict): Consists of the fields listed in the
+                ipwhois.rdap._RDAPEntity dict. The raw result is included for
+                each object if the inc_raw parameter is True.
+            :roles (dict): The mapping of entity handles to roles.
+        """
+
+        result = {}
+
+        if bootstrap:
+            entity_url = '{0}/entity/{1}'.format(
+                BOOTSTRAP_URL, entity)
+        else:
+            tmp_reg = asn_data['asn_registry']
+            entity_url = RIR_RDAP[tmp_reg]['entity_url']
+            entity_url = str(entity_url).format(entity)
+
+        try:
+
+            # RDAP entity query
+            response = self._net.get_http_json(
+                url=entity_url, retry_count=retry_count,
+                rate_limit_timeout=rate_limit_timeout
+            )
+
+            # Parse the entity
+            result_ent = _RDAPEntity(response)
+            result_ent.parse()
+            result = result_ent.vars
+
+            result['roles'] = None
+            try:
+
+                result['roles'] = roles[entity]
+
+            except KeyError:  # pragma: no cover
+
+                pass
+
+            try:
+
+                for tmp in response['entities']:
+
+                    if tmp['handle'] not in roles:
+                        roles[tmp['handle']] = tmp['roles']
+
+            except (IndexError, KeyError):
+
+                pass
+
+            if inc_raw:
+                result['raw'] = response
+
+        except (HTTPLookupError, InvalidEntityObject):
+
+            pass
+
+        return_tuple = namedtuple('return_tuple', ['result', 'roles'])
+        return return_tuple(result, roles)
+
     def lookup(self, inc_raw=False, retry_count=3, asn_data=None, depth=0,
                excluded_entities=None, response=None, bootstrap=False,
-               rate_limit_timeout=120):
+               rate_limit_timeout=120, root_ent_check=True):
         """
         The function for retrieving and parsing information for an IP
         address via RDAP (HTTP).
@@ -716,6 +803,9 @@ class RDAP:
             rate_limit_timeout (:obj:`int`): The number of seconds to wait
                 before retrying when a rate limit notice is returned via
                 rdap+json. Defaults to 120.
+            root_ent_check (:obj:`bool`): If True, will perform
+                additional RDAP HTTP queries for missing entity data at the
+                root level. Defaults to True.
 
         Returns:
             dict: The IP RDAP lookup results
@@ -792,6 +882,19 @@ class RDAP:
                 if ent['handle'] not in [results['entities'],
                                          excluded_entities]:
 
-                    result_ent = _RDAPEntity(ent)
-                    result_ent.parse()
-
+                    if 'vcardArray' not in ent and root_ent_check:
+                        entity_object, roles = self._get_entity(
+                            entity=ent['handle'],
+                            roles=roles,
+                            inc_raw=inc_raw,
+                            retry_count=retry_count,
+                            asn_data=asn_data,
+                            bootstrap=bootstrap,
+                            rate_limit_timeout=rate_limit_timeout
+                        )
+                        results['objects'][ent['handle']] = entity_object
+
+                    else:
+
+                        result_ent = _RDAPEntity(ent)
+                        result_ent.parse()
+
@@ -835,57 +938,18 @@ class RDAP:
                                     list(new_objects.keys()) +
                                     excluded_entities):
 
-                        if bootstrap:
-                            entity_url = '{0}/entity/{1}'.format(
-                                BOOTSTRAP_URL, ent)
-                        else:
-                            tmp_reg = asn_data['asn_registry']
-                            entity_url = RIR_RDAP[tmp_reg]['entity_url']
-                            entity_url = str(entity_url).format(ent)
-
-                        try:
-
-                            # RDAP entity query
-                            response = self._net.get_http_json(
-                                url=entity_url, retry_count=retry_count,
-                                rate_limit_timeout=rate_limit_timeout
-                            )
-
-                            # Parse the entity
-                            result_ent = _RDAPEntity(response)
-                            result_ent.parse()
-                            new_objects[ent] = result_ent.vars
-
-                            new_objects[ent]['roles'] = None
-                            try:
-
-                                new_objects[ent]['roles'] = roles[ent]
-
-                            except KeyError:  # pragma: no cover
-
-                                pass
-
-                            try:
-
-                                for tmp in response['entities']:
-
-                                    if tmp['handle'] not in roles:
-
-                                        roles[tmp['handle']] = tmp['roles']
-
-                            except (IndexError, KeyError):
-
-                                pass
-
-                            if inc_raw:
-
-                                new_objects[ent]['raw'] = response
-
-                        except (HTTPLookupError, InvalidEntityObject):
-
-                            pass
-
-                except TypeError:
+                        entity_object, roles = self._get_entity(
+                            entity=ent,
+                            roles=roles,
+                            inc_raw=inc_raw,
+                            retry_count=retry_count,
+                            asn_data=asn_data,
+                            bootstrap=bootstrap,
+                            rate_limit_timeout=rate_limit_timeout
+                        )
+                        new_objects[ent] = entity_object
+
+                except (KeyError, TypeError):
 
                     pass
 

@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -166,17 +166,6 @@ group.add_argument(
     help='The number of times to retry in case socket errors, timeouts, '
          'connection resets, etc. are encountered.'
 )
-group.add_argument(
-    '--asn_alts',
-    type=str,
-    nargs=1,
-    default='whois,http',
-    metavar='"ASN_ALTS"',
-    help='A comma delimited list of additional lookup types to attempt if the '
-         'ASN dns lookup fails. Allow permutations must be enabled. '
-         'Defaults to all: "whois,http" *WARNING* deprecated in '
-         'favor of new argument asn_methods.'
-)
 group.add_argument(
     '--asn_methods',
     type=str,
@@ -1456,9 +1445,6 @@ if script_args.addr:
             field_list=script_args.field_list.split(',') if (
                 script_args.field_list and
                 len(script_args.field_list) > 0) else None,
-            asn_alts=script_args.asn_alts.split(',') if (
-                script_args.asn_alts and not script_args.asn_methods and
-                len(script_args.asn_alts) > 0) else None,
             extra_org_map=script_args.extra_org_map,
             inc_nir=(not script_args.exclude_nir),
             nir_field_list=script_args.nir_field_list.split(',') if (
@@ -1484,9 +1470,6 @@ if script_args.addr:
                 len(script_args.excluded_entities) > 0) else None,
             bootstrap=script_args.bootstrap,
             rate_limit_timeout=script_args.rate_limit_timeout,
-            asn_alts=script_args.asn_alts.split(',') if (
-                script_args.asn_alts and not script_args.asn_methods and
-                len(script_args.asn_alts) > 0) else None,
            extra_org_map=script_args.extra_org_map,
            inc_nir=(not script_args.exclude_nir),
            nir_field_list=script_args.nir_field_list.split(',') if (

@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -28,8 +28,9 @@ import argparse
 from collections import OrderedDict
 import json
 from ipwhois.utils import (ipv4_lstrip_zeros, calculate_cidr, get_countries,
-                           ipv4_is_defined, ipv6_is_defined, unique_everseen,
-                           unique_addresses)
+                           ipv4_is_defined, ipv6_is_defined,
+                           ipv4_generate_random, ipv6_generate_random,
+                           unique_everseen, unique_addresses)
 
 # CLI ANSI rendering
 ANSI = {
@@ -86,6 +87,22 @@ parser.add_argument(
     metavar='"IP ADDRESS"',
     help='Check if an IPv6 address is defined (in a reserved address range).'
 )
+parser.add_argument(
+    '--ipv4_generate_random',
+    type=int,
+    nargs=1,
+    metavar='TOTAL',
+    help='Generate random, unique IPv4 addresses that are not defined (can be '
+         'looked up using ipwhois).'
+)
+parser.add_argument(
+    '--ipv6_generate_random',
+    type=int,
+    nargs=1,
+    metavar='TOTAL',
+    help='Generate random, unique IPv6 addresses that are not defined (can be '
+         'looked up using ipwhois).'
+)
 parser.add_argument(
     '--unique_everseen',
     type=json.loads,
@@ -224,6 +241,34 @@ elif script_args.ipv6_is_defined:
 
         print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))
 
+elif script_args.ipv4_generate_random:
+
+    try:
+
+        result = ipv4_generate_random(total=script_args.ipv4_generate_random[0])
+
+        for random_ip in result:
+
+            print(random_ip)
+
+    except Exception as e:
+
+        print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))
+
+elif script_args.ipv6_generate_random:
+
+    try:
+
+        result = ipv6_generate_random(total=script_args.ipv6_generate_random[0])
+
+        for random_ip in result:
+
+            print(random_ip)
+
+    except Exception as e:
+
+        print('{0}Error{1}: {2}'.format(ANSI['red'], ANSI['end'], str(e)))
+
 elif script_args.unique_everseen:
 
     try:

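The two new CLI flags wrap the ipv4_generate_random()/ipv6_generate_random() helpers from ipwhois.utils, which can also be called directly from Python; a hedged sketch (not part of the diff):

    from ipwhois.utils import ipv4_generate_random, ipv6_generate_random

    # Both helpers yield unique, non-reserved addresses suitable for lookups.
    for ip in ipv4_generate_random(total=5):
        print(ip)
    for ip in ipv6_generate_random(total=5):
        print(ip)
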
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -87,30 +87,30 @@ IETF_RFC_REFERENCES = {
 IP_REGEX = (
     r'(?P<ip>'
     # IPv4
-    '(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.)){3}'
-    '(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
+    r'(((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.)){3}'
+    r'(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
     # IPv6
-    '|\[?(((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:)'
-    '{6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
-    '2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]'
-    '{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d'
-    '\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|'
-    '((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
-    '2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]'
-    '{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)'
-    '(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(('
-    '(:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1'
-    '\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(('
-    '[0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4})'
-    '{0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]'
-    '?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:(('
-    '25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})'
-    ')|:)))(%.+)?))\]?'
+    r'|\[?(((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:)'
+    r'{6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
+    r'2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]'
+    r'{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d'
+    r'\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|'
+    r'((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|'
+    r'2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]'
+    r'{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)'
+    r'(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(('
+    r'(:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1'
+    r'\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(('
+    r'[0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4})'
+    r'{0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]'
+    r'?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:(('
+    r'25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})'
+    r')|:)))(%.+)?))\]?'
     # Optional IPv4 Port
-    '((:(6553[0-5]|655[0-2]\d|65[0-4]\d{2}|6[0-4]\d{3}|[1-5]\d{4}|[1-9]\d{0,3}'
+    r'((:(6553[0-5]|655[0-2]\d|65[0-4]\d{2}|6[0-4]\d{3}|[1-5]\d{4}|[1-9]\d{0,3}'
     # Optional CIDR block
-    '))|(\/(?:[012]\d?|3[012]?|[4-9])))?'
-    ')'
+    r'))|(\/(?:[012]\d?|3[012]?|[4-9])))?'
+    r')'
 )
 
 
@@ -212,6 +212,7 @@ def get_countries(is_legacy_xml=False):
 
             # Read the file.
             data = f.read()
+            f.close()
 
             # Check if there is data.
             if not data:  # pragma: no cover
@@ -258,6 +259,8 @@ def get_countries(is_legacy_xml=False):
                 # Add to the countries dictionary.
                 countries[code] = name
 
+            f.close()
+
     return countries
 
 
@@ -506,6 +509,7 @@ def unique_addresses(data=None, file_path=None):
 
             # Read the file.
             file_data = f.read()
+            f.close()
 
         pattern = re.compile(
             str(IP_REGEX),

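The IP_REGEX change only adds raw-string prefixes (silencing invalid escape-sequence warnings on newer Python versions); matching behavior should be unchanged. A small sketch (not part of the diff; the sample text is made up):

    import re
    from ipwhois.utils import IP_REGEX, unique_addresses

    text = 'hosts 74.125.225.229:443 and [2001:4860:4860::8888]'
    # The pattern exposes a named group 'ip' for each match.
    print([m.group('ip') for m in re.finditer(IP_REGEX, text)])
    # unique_addresses() compiles IP_REGEX internally and tallies occurrences.
    print(unique_addresses(data=text))
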
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 Philip Hane
+# Copyright (c) 2013-2020 Philip Hane
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -67,7 +67,7 @@ RIR_WHOIS = {
             'name': r'(NetName):[^\S\n]+(?P<val>.+?)\n',
             'handle': r'(NetHandle):[^\S\n]+(?P<val>.+?)\n',
             'description': r'(OrgName|CustName):[^\S\n]+(?P<val>.+?)'
-                           '(?=(\n\S):?)',
+                           '(?=(\n\\S):?)',
             'country': r'(Country):[^\S\n]+(?P<val>.+?)\n',
             'state': r'(StateProv):[^\S\n]+(?P<val>.+?)\n',
             'city': r'(City):[^\S\n]+(?P<val>.+?)\n',
@@ -75,7 +75,7 @@ RIR_WHOIS = {
             'postal_code': r'(PostalCode):[^\S\n]+(?P<val>.+?)\n',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'created': r'(RegDate):[^\S\n]+(?P<val>.+?)\n',
             'updated': r'(Updated):[^\S\n]+(?P<val>.+?)\n',
@@ -92,7 +92,7 @@ RIR_WHOIS = {
             'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'created': (
                 r'(created):[^\S\n]+(?P<val>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]'
@@ -115,7 +115,7 @@ RIR_WHOIS = {
             'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'updated': r'(changed):[^\S\n]+.*(?P<val>[0-9]{8}).*?\n'
         },
@@ -129,7 +129,7 @@ RIR_WHOIS = {
             'country': r'(country):[^\S\n]+(?P<val>.+?)\n',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'created': r'(created):[^\S\n]+(?P<val>[0-9]{8}).*?\n',
             'updated': r'(changed):[^\S\n]+(?P<val>[0-9]{8}).*?\n'
@@ -146,7 +146,7 @@ RIR_WHOIS = {
             'address': r'(address):[^\S\n]+(?P<val>.+?)(?=(\n\S):?)',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
         }
     }
@@ -166,7 +166,7 @@ RWHOIS = {
             'postal_code': r'(network:Postal-Code):(?P<val>.+?)\n',
             'emails': (
                 r'.+?:.*?[^\S\n]+(?P<val>[\w\-\.]+?@[\w\-\.]+\.[\w\-]+)('
-                '[^\S\n]+.*?)*?\n'
+                '[^\\S\n]+.*?)*?\n'
             ),
             'created': r'(network:Created):(?P<val>.+?)\n',
             'updated': r'(network:Updated):(?P<val>.+?)\n'
@@ -324,16 +324,6 @@ class Whois:
 
         return ret
 
-    def _parse_fields(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._parse_fields() has been deprecated and will be '
-             'removed. You should now use Whois.parse_fields().')
-        return self.parse_fields(*args, **kwargs)
-
     def get_nets_arin(self, response):
         """
         The function for parsing network blocks from ARIN whois data.
@@ -415,16 +405,6 @@ class Whois:
 
         return nets
 
-    def _get_nets_arin(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_arin() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_arin().')
-        return self.get_nets_arin(*args, **kwargs)
-
     def get_nets_lacnic(self, response):
         """
         The function for parsing network blocks from LACNIC whois data.
@@ -495,16 +475,6 @@ class Whois:
 
         return nets
 
-    def _get_nets_lacnic(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_lacnic() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_lacnic().')
-        return self.get_nets_lacnic(*args, **kwargs)
-
     def get_nets_other(self, response):
         """
         The function for parsing network blocks from generic whois data.
@@ -577,16 +547,6 @@ class Whois:
 
         return nets
 
-    def _get_nets_other(self, *args, **kwargs):
-        """
-        Deprecated. This will be removed in a future release.
-        """
-
-        from warnings import warn
-        warn('Whois._get_nets_other() has been deprecated and will be '
-             'removed. You should now use Whois.get_nets_other().')
-        return self.get_nets_other(*args, **kwargs)
-
     def lookup(self, inc_raw=False, retry_count=3, response=None,
                get_referral=False, extra_blacklist=None,
                ignore_referral_errors=False, asn_data=None,