Mirror of https://github.com/bettercap/bettercap, synced 2025-08-19 13:09:49 -07:00
new: the events.stream module will now properly parse and display interesting HTTP requests and responses
parent 38a87e38b2
commit 1220874473
4 changed files with 323 additions and 59 deletions
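
The core of the change is a heuristic for deciding which sniffed requests are worth dumping in full (shouldDumpHttpRequest in the new modules/events_view_http.go below): anything that is not a plain GET, anything carrying auth/token style headers, and any cookie that is not a common tracking cookie. As a rough standalone sketch of that heuristic, assuming nothing beyond what the diff shows (the helper names below are illustrative, not bettercap's):

// Standalone sketch, not part of the commit: the kind of "interesting
// request" check the new shouldDumpHttpRequest performs before dumping
// a sniffed request in full. Function and variable names are illustrative.
package main

import (
    "fmt"
    "net/http"
    "strings"
)

// Common tracking cookies that are not interesting on their own.
var boringCookies = map[string]bool{
    "__cfduid": true,
    "_ga":      true,
    "_gat":     true,
}

func isInteresting(req *http.Request) bool {
    // anything other than a plain GET is always dumped
    if req.Method != http.MethodGet {
        return true
    }
    for name, values := range req.Header {
        lower := strings.ToLower(name)
        // authorization / token style headers are interesting
        if strings.Contains(lower, "auth") || strings.Contains(lower, "token") {
            return true
        }
        // a Cookie header is interesting unless it only carries tracking cookies
        if lower == "cookie" {
            for _, value := range values {
                for _, cookie := range strings.Split(value, ";") {
                    cookieName := strings.TrimSpace(strings.SplitN(cookie, "=", 2)[0])
                    if !boringCookies[cookieName] {
                        return true
                    }
                }
            }
        }
    }
    return false
}

func main() {
    req, _ := http.NewRequest("GET", "http://example.com/", nil)
    req.Header.Set("Cookie", "_ga=GA1.2.123; sessionid=abc123")
    fmt.Println(isInteresting(req)) // true: sessionid is not a known tracking cookie
}
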
@@ -122,40 +122,14 @@ func (s *EventsStream) viewModuleEvent(e session.Event) {
 }
 
 func (s *EventsStream) viewSnifferEvent(e session.Event) {
-    se := e.Data.(SnifferEvent)
-    misc := ""
-
-    if e.Tag == "net.sniff.leak.http" {
-        req := se.Data.(HTTPRequest)
-        if req.Method != "GET" {
-            misc += "\n\n"
-            misc += fmt.Sprintf(" Method: %s\n", core.Yellow(req.Method))
-            misc += fmt.Sprintf(" URL: %s\n", core.Yellow(req.URL))
-            misc += fmt.Sprintf(" Headers:\n")
-            for name, values := range req.Headers {
-                misc += fmt.Sprintf(" %s => %s\n", core.Green(name), strings.Join(values, ", "))
-            }
-
-            if req.Form != nil {
-                misc += " \n Form:\n\n"
-                if len(req.Form) == 0 {
-                    misc += fmt.Sprintf(" %s\n", core.Dim("<empty>"))
-                } else {
-                    for key, values := range req.Form {
-                        misc += fmt.Sprintf(" %s => %s\n", core.Green(key), core.Bold(strings.Join(values, ", ")))
-                    }
-                }
-            } else if req.Body != nil {
-                misc += fmt.Sprintf(" \n %s:\n\n %s\n", core.Bold("Body"), string(req.Body))
-            }
-        }
-    }
-
-    fmt.Fprintf(s.output, "[%s] [%s] %s %s\n",
-        e.Time.Format(eventTimeFormat),
-        core.Green(e.Tag),
-        se.Message,
-        misc)
+    if strings.HasPrefix(e.Tag, "net.sniff.http.") {
+        s.viewHttpEvent(e)
+    } else {
+        fmt.Fprintf(s.output, "[%s] [%s] %s\n",
+            e.Time.Format(eventTimeFormat),
+            core.Green(e.Tag),
+            e.Data.(SnifferEvent).Message)
+    }
 }
 
 func (s *EventsStream) viewSynScanEvent(e session.Event) {
modules/events_view_http.go (new file, 209 lines)

@@ -0,0 +1,209 @@
package modules

import (
    "bytes"
    "compress/gzip"
    "encoding/hex"
    "encoding/json"
    "fmt"
    "net/url"
    "regexp"
    "strings"

    "github.com/bettercap/bettercap/core"
    "github.com/bettercap/bettercap/session"
)

var (
    cookieFilter = map[string]bool{
        "__cfduid": true,
        "_ga":      true,
        "_gat":     true,
    }

    reJsonKey = regexp.MustCompile(`("[^"]+"):`)
)

func (s *EventsStream) shouldDumpHttpRequest(req HTTPRequest) bool {
    // dump if it's not just a GET
    if req.Method != "GET" {
        return true
    }
    // search for interesting headers and cookies
    for name, values := range req.Headers {
        headerName := strings.ToLower(name)
        if strings.Contains(headerName, "auth") || strings.Contains(headerName, "token") {
            return true
        } else if headerName == "cookie" {
            for _, value := range values {
                cookies := strings.Split(value, ";")
                for _, cookie := range cookies {
                    parts := strings.Split(cookie, "=")
                    if _, found := cookieFilter[parts[0]]; found == false {
                        return true
                    }
                }
            }
        }
    }
    return false
}

func (s *EventsStream) shouldDumpHttpResponse(res HTTPResponse) bool {
    if strings.Contains(res.ContentType, "text/plain") {
        return true
    } else if strings.Contains(res.ContentType, "application/json") {
        return true
    } else if strings.Contains(res.ContentType, "text/xml") {
        return true
    }

    // search for interesting headers
    for name, _ := range res.Headers {
        headerName := strings.ToLower(name)
        if strings.Contains(headerName, "auth") || strings.Contains(headerName, "token") || strings.Contains(headerName, "cookie") {
            return true
        }
    }

    return false
}

func (s *EventsStream) dumpForm(body []byte) string {
    form := []string{}
    for _, v := range strings.Split(string(body), "&") {
        if strings.Contains(v, "=") {
            parts := strings.SplitN(v, "=", 2)
            name := parts[0]
            value, err := url.QueryUnescape(parts[1])
            if err != nil {
                value = parts[1]
            }

            form = append(form, fmt.Sprintf("%s=%s", core.Green(name), core.Bold(core.Red(value))))
        } else {
            value, err := url.QueryUnescape(v)
            if err != nil {
                value = v
            }
            form = append(form, fmt.Sprintf("%s", core.Bold(core.Red(value))))
        }
    }
    return "\n" + strings.Join(form, "&") + "\n"
}

func (s *EventsStream) dumpText(body []byte) string {
    return "\n" + core.Bold(core.Red(string(body))) + "\n"
}

func (s *EventsStream) dumpGZIP(body []byte) string {
    buffer := bytes.NewBuffer(body)
    uncompressed := bytes.Buffer{}
    reader, err := gzip.NewReader(buffer)
    if err != nil {
        return s.dumpRaw(body)
    } else if _, err = uncompressed.ReadFrom(reader); err != nil {
        return s.dumpRaw(body)
    }
    return s.dumpRaw(uncompressed.Bytes())
}

func (s *EventsStream) dumpJSON(body []byte) string {
    var buf bytes.Buffer
    var pretty string

    if err := json.Indent(&buf, body, "", " "); err != nil {
        pretty = string(body)
    } else {
        pretty = string(buf.Bytes())
    }

    return "\n" + reJsonKey.ReplaceAllString(pretty, core.W(core.GREEN, `$1:`)) + "\n"
}

func (s *EventsStream) dumpXML(body []byte) string {
    // TODO: indent xml
    return "\n" + string(body) + "\n"
}

func (s *EventsStream) dumpRaw(body []byte) string {
    return "\n" + hex.Dump(body) + "\n"
}

func (s *EventsStream) viewHttpRequest(e session.Event) {
    se := e.Data.(SnifferEvent)
    req := se.Data.(HTTPRequest)

    fmt.Fprintf(s.output, "[%s] [%s] %s\n",
        e.Time.Format(eventTimeFormat),
        core.Green(e.Tag),
        se.Message)

    if s.shouldDumpHttpRequest(req) {
        dump := fmt.Sprintf("%s %s %s\n", core.Bold(req.Method), req.URL, core.Dim(req.Proto))
        dump += fmt.Sprintf("%s: %s\n", core.Blue("Host"), core.Yellow(req.Host))
        for name, values := range req.Headers {
            for _, value := range values {
                dump += fmt.Sprintf("%s: %s\n", core.Blue(name), core.Yellow(value))
            }
        }

        if req.Body != nil {
            if strings.Contains(req.ContentType, "application/x-www-form-urlencoded") {
                dump += s.dumpForm(req.Body)
            } else if strings.Contains(req.ContentType, "text/plain") {
                dump += s.dumpText(req.Body)
            } else if strings.Contains(req.ContentType, "text/xml") {
                dump += s.dumpXML(req.Body)
            } else if strings.Contains(req.ContentType, "gzip") {
                dump += s.dumpGZIP(req.Body)
            } else if strings.Contains(req.ContentType, "application/json") {
                dump += s.dumpJSON(req.Body)
            } else {
                dump += s.dumpRaw(req.Body)
            }
        }

        fmt.Fprintf(s.output, "\n%s\n", dump)
    }
}

func (s *EventsStream) viewHttpResponse(e session.Event) {
    se := e.Data.(SnifferEvent)
    res := se.Data.(HTTPResponse)

    fmt.Fprintf(s.output, "[%s] [%s] %s\n",
        e.Time.Format(eventTimeFormat),
        core.Green(e.Tag),
        se.Message)

    if s.shouldDumpHttpResponse(res) {
        dump := fmt.Sprintf("%s %s\n", core.Dim(res.Protocol), res.Status)
        for name, values := range res.Headers {
            for _, value := range values {
                dump += fmt.Sprintf("%s: %s\n", core.Blue(name), core.Yellow(value))
            }
        }

        if res.Body != nil {
            // TODO: add more interesting response types
            if strings.Contains(res.ContentType, "text/plain") {
                dump += s.dumpText(res.Body)
            } else if strings.Contains(res.ContentType, "application/json") {
                dump += s.dumpJSON(res.Body)
            } else if strings.Contains(res.ContentType, "text/xml") {
                dump += s.dumpXML(res.Body)
            }
        }

        fmt.Fprintf(s.output, "\n%s\n", dump)
    }
}

func (s *EventsStream) viewHttpEvent(e session.Event) {
    if e.Tag == "net.sniff.http.request" {
        s.viewHttpRequest(e)
    } else if e.Tag == "net.sniff.http.response" {
        s.viewHttpResponse(e)
    }
}
@@ -30,6 +30,6 @@ func NewSnifferEvent(t time.Time, proto string, src string, dst string, data int
 }
 
 func (e SnifferEvent) Push() {
-    session.I.Events.Add("net.sniff.leak."+e.Protocol, e)
+    session.I.Events.Add("net.sniff."+e.Protocol, e)
     session.I.Refresh()
 }
@@ -3,64 +3,145 @@ package modules
 import (
     "bufio"
     "bytes"
+    "compress/gzip"
     "io/ioutil"
     "net/http"
-    "net/url"
+    "strings"
 
     "github.com/bettercap/bettercap/core"
 
     "github.com/google/gopacket"
     "github.com/google/gopacket/layers"
+
+    "github.com/dustin/go-humanize"
 )
 
 type HTTPRequest struct {
-    Method  string      `json:"method"`
-    Host    string      `json:"host"`
-    URL     string      `json:"url:"`
-    Headers http.Header `json:"headers"`
-    Form    url.Values  `json:"form"`
-    Body    []byte      `json:"body"`
+    Method      string      `json:"method"`
+    Proto       string      `json:"proto"`
+    Host        string      `json:"host"`
+    URL         string      `json:"url:"`
+    Headers     http.Header `json:"headers"`
+    ContentType string      `json:"content_type"`
+    Body        []byte      `json:"body"`
+}
+
+type HTTPResponse struct {
+    Protocol         string      `json:"protocol"`
+    Status           string      `json:"status"`
+    StatusCode       int         `json:"status_code"`
+    Headers          http.Header `json:"headers"`
+    Body             []byte      `json:"body"`
+    ContentLength    int64       `json:"content_length"`
+    ContentType      string      `json:"content_type"`
+    TransferEncoding []string    `json:"transfer_encoding"`
 }
 
 func toSerializableRequest(req *http.Request) HTTPRequest {
     body := []byte(nil)
-    form := (url.Values)(nil)
-    if err := req.ParseForm(); err == nil {
-        form = req.Form
-    } else if req.Body != nil {
+    ctype := "?"
+    if req.Body != nil {
         body, _ = ioutil.ReadAll(req.Body)
     }
 
+    for name, values := range req.Header {
+        if strings.ToLower(name) == "content-type" {
+            for _, value := range values {
+                ctype = value
+            }
+        }
+    }
+
     return HTTPRequest{
-        Method:  req.Method,
-        Host:    req.Host,
-        URL:     req.URL.String(),
-        Headers: req.Header,
-        Form:    form,
-        Body:    body,
+        Method:      req.Method,
+        Proto:       req.Proto,
+        Host:        req.Host,
+        URL:         req.URL.String(),
+        Headers:     req.Header,
+        ContentType: ctype,
+        Body:        body,
+    }
+}
+
+func toSerializableResponse(res *http.Response) HTTPResponse {
+    body := []byte(nil)
+    ctype := "?"
+    cenc := ""
+
+    for name, values := range res.Header {
+        name = strings.ToLower(name)
+        if name == "content-type" {
+            for _, value := range values {
+                ctype = value
+            }
+        } else if name == "content-encoding" {
+            for _, value := range values {
+                cenc = value
+            }
+        }
+    }
+
+    if res.Body != nil {
+        body, _ = ioutil.ReadAll(res.Body)
+    }
+
+    // attempt decompression, but since this has been parsed by just
+    // a tcp packet, it will probably fail
+    if body != nil && strings.Contains(cenc, "gzip") {
+        buffer := bytes.NewBuffer(body)
+        uncompressed := bytes.Buffer{}
+        if reader, err := gzip.NewReader(buffer); err == nil {
+            if _, err = uncompressed.ReadFrom(reader); err == nil {
+                body = uncompressed.Bytes()
+            }
+        }
+    }
+
+    return HTTPResponse{
+        Protocol:         res.Proto,
+        Status:           res.Status,
+        StatusCode:       res.StatusCode,
+        Headers:          res.Header,
+        Body:             body,
+        ContentLength:    res.ContentLength,
+        ContentType:      ctype,
+        TransferEncoding: res.TransferEncoding,
     }
 }
 
 func httpParser(ip *layers.IPv4, pkt gopacket.Packet, tcp *layers.TCP) bool {
     data := tcp.Payload
-    reader := bufio.NewReader(bytes.NewReader(data))
-    req, err := http.ReadRequest(reader)
-
-    if err == nil {
+    if req, err := http.ReadRequest(bufio.NewReader(bytes.NewReader(data))); err == nil {
         NewSnifferEvent(
             pkt.Metadata().Timestamp,
-            "http",
+            "http.request",
             ip.SrcIP.String(),
             req.Host,
             toSerializableRequest(req),
-            "%s %s %s %s%s %s",
+            "%s %s %s %s%s",
             core.W(core.BG_RED+core.FG_BLACK, "http"),
             vIP(ip.SrcIP),
             core.W(core.BG_LBLUE+core.FG_BLACK, req.Method),
             core.Yellow(req.Host),
             vURL(req.URL.String()),
-            core.Dim(req.UserAgent()),
+        ).Push()
+
+        return true
+    } else if res, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(data)), nil); err == nil {
+        sres := toSerializableResponse(res)
+        NewSnifferEvent(
+            pkt.Metadata().Timestamp,
+            "http.response",
+            ip.SrcIP.String(),
+            ip.DstIP.String(),
+            sres,
+            "%s %s:%d %s -> %s (%s %s)",
+            core.W(core.BG_RED+core.FG_BLACK, "http"),
+            vIP(ip.SrcIP),
+            tcp.SrcPort,
+            core.Bold(res.Status),
+            vIP(ip.DstIP),
+            core.Dim(humanize.Bytes(uint64(len(sres.Body)))),
+            core.Yellow(sres.ContentType),
         ).Push()
 
         return true
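
For reference, the probing pattern the updated httpParser applies to a raw TCP payload, trying http.ReadRequest first and falling back to http.ReadResponse, can be exercised on its own. A minimal standalone sketch with made-up sample payloads and a hypothetical helper name, not code from the commit:

// Standalone sketch, not part of the commit: classify a raw payload the way
// the new httpParser does, trying a request parse first, then a response.
package main

import (
    "bufio"
    "bytes"
    "fmt"
    "net/http"
)

func classify(payload []byte) string {
    if req, err := http.ReadRequest(bufio.NewReader(bytes.NewReader(payload))); err == nil {
        return fmt.Sprintf("request: %s %s (host %s)", req.Method, req.URL, req.Host)
    } else if res, err := http.ReadResponse(bufio.NewReader(bytes.NewReader(payload)), nil); err == nil {
        return fmt.Sprintf("response: %s (%s)", res.Status, res.Header.Get("Content-Type"))
    }
    return "not an HTTP payload"
}

func main() {
    raw := []byte("GET /index.html HTTP/1.1\r\nHost: example.com\r\nCookie: sessionid=abc\r\n\r\n")
    fmt.Println(classify(raw))

    raw = []byte("HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: 2\r\n\r\nok")
    fmt.Println(classify(raw))
}
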