From b5596d54d232ae773c6e28c23f0c32f0a18680d5 Mon Sep 17 00:00:00 2001 From: evilsocket Date: Sun, 1 Apr 2018 14:27:33 +0200 Subject: [PATCH] new: supporting wildcard expressions in dns.spoof module (closes #215) --- glide.lock | 12 +- modules/dns_spoof.go | 25 +- vendor/github.com/gobwas/glob/.gitignore | 8 + vendor/github.com/gobwas/glob/.travis.yml | 11 + vendor/github.com/gobwas/glob/LICENSE | 21 + vendor/github.com/gobwas/glob/bench.sh | 26 + .../gobwas/glob/cmd/globdraw/main.go | 44 ++ .../gobwas/glob/cmd/globtest/main.go | 82 +++ .../gobwas/glob/compiler/compiler.go | 525 +++++++++++++++ .../gobwas/glob/compiler/compiler_test.go | 624 ++++++++++++++++++ vendor/github.com/gobwas/glob/glob.go | 80 +++ vendor/github.com/gobwas/glob/glob_test.go | 527 +++++++++++++++ vendor/github.com/gobwas/glob/match/any.go | 45 ++ vendor/github.com/gobwas/glob/match/any_of.go | 82 +++ .../gobwas/glob/match/any_of_test.go | 53 ++ .../github.com/gobwas/glob/match/any_test.go | 57 ++ vendor/github.com/gobwas/glob/match/btree.go | 185 ++++++ .../gobwas/glob/match/btree_test.go | 90 +++ .../github.com/gobwas/glob/match/contains.go | 58 ++ .../gobwas/glob/match/contains_test.go | 74 +++ .../gobwas/glob/match/debug/debug.go | 55 ++ .../github.com/gobwas/glob/match/every_of.go | 99 +++ .../gobwas/glob/match/every_of_test.go | 45 ++ vendor/github.com/gobwas/glob/match/list.go | 49 ++ .../github.com/gobwas/glob/match/list_test.go | 58 ++ vendor/github.com/gobwas/glob/match/match.go | 81 +++ .../gobwas/glob/match/match_test.go | 90 +++ vendor/github.com/gobwas/glob/match/max.go | 49 ++ .../github.com/gobwas/glob/match/max_test.go | 57 ++ vendor/github.com/gobwas/glob/match/min.go | 57 ++ .../github.com/gobwas/glob/match/min_test.go | 57 ++ .../github.com/gobwas/glob/match/nothing.go | 27 + .../gobwas/glob/match/nothing_test.go | 54 ++ vendor/github.com/gobwas/glob/match/prefix.go | 50 ++ .../gobwas/glob/match/prefix_any.go | 55 ++ .../gobwas/glob/match/prefix_any_test.go | 47 ++ .../gobwas/glob/match/prefix_suffix.go | 62 ++ .../gobwas/glob/match/prefix_suffix_test.go | 67 ++ .../gobwas/glob/match/prefix_test.go | 57 ++ vendor/github.com/gobwas/glob/match/range.go | 48 ++ .../gobwas/glob/match/range_test.go | 67 ++ vendor/github.com/gobwas/glob/match/row.go | 77 +++ .../github.com/gobwas/glob/match/row_test.go | 82 +++ .../github.com/gobwas/glob/match/segments.go | 91 +++ .../gobwas/glob/match/segments_test.go | 83 +++ vendor/github.com/gobwas/glob/match/single.go | 43 ++ .../gobwas/glob/match/single_test.go | 57 ++ vendor/github.com/gobwas/glob/match/suffix.go | 35 + .../gobwas/glob/match/suffix_any.go | 43 ++ .../gobwas/glob/match/suffix_any_test.go | 47 ++ .../gobwas/glob/match/suffix_test.go | 57 ++ vendor/github.com/gobwas/glob/match/super.go | 33 + .../gobwas/glob/match/super_test.go | 54 ++ vendor/github.com/gobwas/glob/match/text.go | 45 ++ .../github.com/gobwas/glob/match/text_test.go | 57 ++ vendor/github.com/gobwas/glob/readme.md | 148 +++++ .../github.com/gobwas/glob/syntax/ast/ast.go | 122 ++++ .../gobwas/glob/syntax/ast/parser.go | 157 +++++ .../gobwas/glob/syntax/ast/parser_test.go | 218 ++++++ .../gobwas/glob/syntax/lexer/lexer.go | 273 ++++++++ .../gobwas/glob/syntax/lexer/lexer_test.go | 192 ++++++ .../gobwas/glob/syntax/lexer/token.go | 88 +++ .../github.com/gobwas/glob/syntax/syntax.go | 14 + .../gobwas/glob/util/runes/runes.go | 154 +++++ .../gobwas/glob/util/runes/runes_test.go | 222 +++++++ .../gobwas/glob/util/strings/strings.go | 39 ++ 66 files changed, 6281 insertions(+), 10 
deletions(-) create mode 100644 vendor/github.com/gobwas/glob/.gitignore create mode 100644 vendor/github.com/gobwas/glob/.travis.yml create mode 100644 vendor/github.com/gobwas/glob/LICENSE create mode 100755 vendor/github.com/gobwas/glob/bench.sh create mode 100644 vendor/github.com/gobwas/glob/cmd/globdraw/main.go create mode 100644 vendor/github.com/gobwas/glob/cmd/globtest/main.go create mode 100644 vendor/github.com/gobwas/glob/compiler/compiler.go create mode 100644 vendor/github.com/gobwas/glob/compiler/compiler_test.go create mode 100644 vendor/github.com/gobwas/glob/glob.go create mode 100644 vendor/github.com/gobwas/glob/glob_test.go create mode 100644 vendor/github.com/gobwas/glob/match/any.go create mode 100644 vendor/github.com/gobwas/glob/match/any_of.go create mode 100644 vendor/github.com/gobwas/glob/match/any_of_test.go create mode 100644 vendor/github.com/gobwas/glob/match/any_test.go create mode 100644 vendor/github.com/gobwas/glob/match/btree.go create mode 100644 vendor/github.com/gobwas/glob/match/btree_test.go create mode 100644 vendor/github.com/gobwas/glob/match/contains.go create mode 100644 vendor/github.com/gobwas/glob/match/contains_test.go create mode 100644 vendor/github.com/gobwas/glob/match/debug/debug.go create mode 100644 vendor/github.com/gobwas/glob/match/every_of.go create mode 100644 vendor/github.com/gobwas/glob/match/every_of_test.go create mode 100644 vendor/github.com/gobwas/glob/match/list.go create mode 100644 vendor/github.com/gobwas/glob/match/list_test.go create mode 100644 vendor/github.com/gobwas/glob/match/match.go create mode 100644 vendor/github.com/gobwas/glob/match/match_test.go create mode 100644 vendor/github.com/gobwas/glob/match/max.go create mode 100644 vendor/github.com/gobwas/glob/match/max_test.go create mode 100644 vendor/github.com/gobwas/glob/match/min.go create mode 100644 vendor/github.com/gobwas/glob/match/min_test.go create mode 100644 vendor/github.com/gobwas/glob/match/nothing.go create mode 100644 vendor/github.com/gobwas/glob/match/nothing_test.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix_any.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix_any_test.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix_suffix.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix_suffix_test.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix_test.go create mode 100644 vendor/github.com/gobwas/glob/match/range.go create mode 100644 vendor/github.com/gobwas/glob/match/range_test.go create mode 100644 vendor/github.com/gobwas/glob/match/row.go create mode 100644 vendor/github.com/gobwas/glob/match/row_test.go create mode 100644 vendor/github.com/gobwas/glob/match/segments.go create mode 100644 vendor/github.com/gobwas/glob/match/segments_test.go create mode 100644 vendor/github.com/gobwas/glob/match/single.go create mode 100644 vendor/github.com/gobwas/glob/match/single_test.go create mode 100644 vendor/github.com/gobwas/glob/match/suffix.go create mode 100644 vendor/github.com/gobwas/glob/match/suffix_any.go create mode 100644 vendor/github.com/gobwas/glob/match/suffix_any_test.go create mode 100644 vendor/github.com/gobwas/glob/match/suffix_test.go create mode 100644 vendor/github.com/gobwas/glob/match/super.go create mode 100644 vendor/github.com/gobwas/glob/match/super_test.go create mode 100644 vendor/github.com/gobwas/glob/match/text.go create mode 100644 
vendor/github.com/gobwas/glob/match/text_test.go create mode 100644 vendor/github.com/gobwas/glob/readme.md create mode 100644 vendor/github.com/gobwas/glob/syntax/ast/ast.go create mode 100644 vendor/github.com/gobwas/glob/syntax/ast/parser.go create mode 100644 vendor/github.com/gobwas/glob/syntax/ast/parser_test.go create mode 100644 vendor/github.com/gobwas/glob/syntax/lexer/lexer.go create mode 100644 vendor/github.com/gobwas/glob/syntax/lexer/lexer_test.go create mode 100644 vendor/github.com/gobwas/glob/syntax/lexer/token.go create mode 100644 vendor/github.com/gobwas/glob/syntax/syntax.go create mode 100644 vendor/github.com/gobwas/glob/util/runes/runes.go create mode 100644 vendor/github.com/gobwas/glob/util/runes/runes_test.go create mode 100644 vendor/github.com/gobwas/glob/util/strings/strings.go diff --git a/glide.lock b/glide.lock index 7a98beb8..cc7d9833 100644 --- a/glide.lock +++ b/glide.lock @@ -1,5 +1,5 @@ hash: fbde0d2452ce166fdbca5d911aa533bef347f420e172f0801a98b90ec6ccf9be -updated: 2018-03-28T13:41:06.165992863+02:00 +updated: 2018-04-01T14:15:20.005813196+02:00 imports: - name: github.com/adrianmo/go-nmea version: 22095aa1b48050243d3eb9a001ca80eb91a0c6fa @@ -25,6 +25,16 @@ imports: - name: github.com/elazarl/goproxy version: a96fa3a318260eab29abaf32f7128c9eb07fb073 vcs: git +- name: github.com/gobwas/glob + version: 19c076cdf202b3d1c0489bdfa2f2f289f634474b + subpackages: + - compiler + - match + - syntax + - syntax/ast + - syntax/lexer + - util/runes + - util/strings - name: github.com/google/go-github version: e48060a28fac52d0f1cb758bc8b87c07bac4a87d subpackages: diff --git a/modules/dns_spoof.go b/modules/dns_spoof.go index 07efa32c..e32c239d 100644 --- a/modules/dns_spoof.go +++ b/modules/dns_spoof.go @@ -4,7 +4,6 @@ import ( "bytes" "fmt" "net" - "strings" "sync" "github.com/bettercap/bettercap/core" @@ -15,12 +14,14 @@ import ( "github.com/google/gopacket" "github.com/google/gopacket/layers" "github.com/google/gopacket/pcap" + + "github.com/gobwas/glob" ) type DNSSpoofer struct { session.SessionModule Handle *pcap.Handle - Domains []string + Domains []glob.Glob Address net.IP All bool waitGroup *sync.WaitGroup @@ -32,6 +33,7 @@ func NewDNSSpoofer(s *session.Session) *DNSSpoofer { SessionModule: session.NewSessionModule("dns.spoof", s), Handle: nil, All: false, + Domains: make([]glob.Glob, 0), waitGroup: &sync.WaitGroup{}, } @@ -79,6 +81,7 @@ func (s DNSSpoofer) Author() string { func (s *DNSSpoofer) Configure() error { var err error var addr string + var domains []string if s.Running() { return session.ErrAlreadyStarted @@ -97,10 +100,18 @@ func (s *DNSSpoofer) Configure() error { return err } - if err, s.Domains = s.ListParam("dns.spoof.domains"); err != nil { + if err, domains = s.ListParam("dns.spoof.domains"); err != nil { return err } + for _, domain := range domains { + if expr, err := glob.Compile(domain); err != nil { + return fmt.Errorf("'%s' is not a valid domain glob expression: %s", domain, err) + } else { + s.Domains = append(s.Domains, expr) + } + } + if err, addr = s.StringParam("dns.spoof.address"); err != nil { return err } @@ -232,12 +243,8 @@ func (s *DNSSpoofer) dnsReply(pkt gopacket.Packet, peth *layers.Ethernet, pudp * } func (s *DNSSpoofer) shouldSpoof(domain string) bool { - if len(s.Domains) == 1 && s.Domains[0] == "*" { - return true - } - - for _, d := range s.Domains { - if strings.HasSuffix(domain, d) == true { + for _, expr := range s.Domains { + if expr.Match(domain) { return true } } diff --git 
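With this change each entry of dns.spoof.domains is compiled once into a glob.Glob in Configure(), and shouldSpoof() reduces to a Match() loop; a lone "*" entry simply compiles to a match-everything glob, so the old single-"*" special case and the strings.HasSuffix() check are no longer needed. Below is a minimal, self-contained sketch of the resulting behavior, outside bettercap; the domain patterns are illustrative only.

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// Illustrative patterns: plain names and wildcards can be mixed freely,
	// since every entry goes through glob.Compile() exactly as in Configure().
	patterns := []string{"*.google.com", "microsoft.*", "facebook.com"}

	domains := make([]glob.Glob, 0)
	for _, p := range patterns {
		expr, err := glob.Compile(p)
		if err != nil {
			// mirrors the module's error path for invalid expressions
			panic(fmt.Errorf("'%s' is not a valid domain glob expression: %s", p, err))
		}
		domains = append(domains, expr)
	}

	// Equivalent of the new shouldSpoof(): the first matching expression wins.
	shouldSpoof := func(domain string) bool {
		for _, expr := range domains {
			if expr.Match(domain) {
				return true
			}
		}
		return false
	}

	fmt.Println(shouldSpoof("www.google.com"))  // true, matched by *.google.com
	fmt.Println(shouldSpoof("microsoft.co.uk")) // true, matched by microsoft.*
	fmt.Println(shouldSpoof("example.com"))     // false, no pattern matches
}
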
a/vendor/github.com/gobwas/glob/.gitignore b/vendor/github.com/gobwas/glob/.gitignore new file mode 100644 index 00000000..b4ae623b --- /dev/null +++ b/vendor/github.com/gobwas/glob/.gitignore @@ -0,0 +1,8 @@ +glob.iml +.idea +*.cpu +*.mem +*.test +*.dot +*.png +*.svg diff --git a/vendor/github.com/gobwas/glob/.travis.yml b/vendor/github.com/gobwas/glob/.travis.yml new file mode 100644 index 00000000..95e8fd2f --- /dev/null +++ b/vendor/github.com/gobwas/glob/.travis.yml @@ -0,0 +1,11 @@ +sudo: false + +language: go + +go: + - 1.7 + - 1.8 + - 1.9 + +script: + - go test -v ./... diff --git a/vendor/github.com/gobwas/glob/LICENSE b/vendor/github.com/gobwas/glob/LICENSE new file mode 100644 index 00000000..9d4735ca --- /dev/null +++ b/vendor/github.com/gobwas/glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Sergey Kamardin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/gobwas/glob/bench.sh b/vendor/github.com/gobwas/glob/bench.sh new file mode 100755 index 00000000..804cf22e --- /dev/null +++ b/vendor/github.com/gobwas/glob/bench.sh @@ -0,0 +1,26 @@ +#! /bin/bash + +bench() { + filename="/tmp/$1-$2.bench" + if test -e "${filename}"; + then + echo "Already exists ${filename}" + else + backup=`git rev-parse --abbrev-ref HEAD` + git checkout $1 + echo -n "Creating ${filename}... " + go test ./... 
-run=NONE -bench=$2 > "${filename}" -benchmem + echo "OK" + git checkout ${backup} + sleep 5 + fi +} + + +to=$1 +current=`git rev-parse --abbrev-ref HEAD` + +bench ${to} $2 +bench ${current} $2 + +benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench" diff --git a/vendor/github.com/gobwas/glob/cmd/globdraw/main.go b/vendor/github.com/gobwas/glob/cmd/globdraw/main.go new file mode 100644 index 00000000..585880db --- /dev/null +++ b/vendor/github.com/gobwas/glob/cmd/globdraw/main.go @@ -0,0 +1,44 @@ +package main + +import ( + "flag" + "fmt" + "github.com/gobwas/glob" + "github.com/gobwas/glob/match" + "github.com/gobwas/glob/match/debug" + "os" + "strings" + "unicode/utf8" +) + +func main() { + pattern := flag.String("p", "", "pattern to draw") + sep := flag.String("s", "", "comma separated list of separators characters") + flag.Parse() + + if *pattern == "" { + flag.Usage() + os.Exit(1) + } + + var separators []rune + if len(*sep) > 0 { + for _, c := range strings.Split(*sep, ",") { + if r, w := utf8.DecodeRuneInString(c); len(c) > w { + fmt.Println("only single charactered separators are allowed") + os.Exit(1) + } else { + separators = append(separators, r) + } + } + } + + glob, err := glob.Compile(*pattern, separators...) + if err != nil { + fmt.Println("could not compile pattern:", err) + os.Exit(1) + } + + matcher := glob.(match.Matcher) + fmt.Fprint(os.Stdout, debug.Graphviz(*pattern, matcher)) +} diff --git a/vendor/github.com/gobwas/glob/cmd/globtest/main.go b/vendor/github.com/gobwas/glob/cmd/globtest/main.go new file mode 100644 index 00000000..95c102f7 --- /dev/null +++ b/vendor/github.com/gobwas/glob/cmd/globtest/main.go @@ -0,0 +1,82 @@ +package main + +import ( + "flag" + "fmt" + "github.com/gobwas/glob" + "os" + "strings" + "testing" + "unicode/utf8" +) + +func benchString(r testing.BenchmarkResult) string { + nsop := r.NsPerOp() + ns := fmt.Sprintf("%10d ns/op", nsop) + allocs := "0" + if r.N > 0 { + if nsop < 100 { + // The format specifiers here make sure that + // the ones digits line up for all three possible formats. + if nsop < 10 { + ns = fmt.Sprintf("%13.2f ns/op", float64(r.T.Nanoseconds())/float64(r.N)) + } else { + ns = fmt.Sprintf("%12.1f ns/op", float64(r.T.Nanoseconds())/float64(r.N)) + } + } + + allocs = fmt.Sprintf("%d", r.MemAllocs/uint64(r.N)) + } + + return fmt.Sprintf("%8d\t%s\t%s allocs", r.N, ns, allocs) +} + +func main() { + pattern := flag.String("p", "", "pattern to draw") + sep := flag.String("s", "", "comma separated list of separators") + fixture := flag.String("f", "", "fixture") + verbose := flag.Bool("v", false, "verbose") + flag.Parse() + + if *pattern == "" { + flag.Usage() + os.Exit(1) + } + + var separators []rune + for _, c := range strings.Split(*sep, ",") { + if r, w := utf8.DecodeRuneInString(c); len(c) > w { + fmt.Println("only single charactered separators are allowed") + os.Exit(1) + } else { + separators = append(separators, r) + } + } + + g, err := glob.Compile(*pattern, separators...) + if err != nil { + fmt.Println("could not compile pattern:", err) + os.Exit(1) + } + + if !*verbose { + fmt.Println(g.Match(*fixture)) + return + } + + fmt.Printf("result: %t\n", g.Match(*fixture)) + + cb := testing.Benchmark(func(b *testing.B) { + for i := 0; i < b.N; i++ { + glob.Compile(*pattern, separators...) 
+ } + }) + fmt.Println("compile:", benchString(cb)) + + mb := testing.Benchmark(func(b *testing.B) { + for i := 0; i < b.N; i++ { + g.Match(*fixture) + } + }) + fmt.Println("match: ", benchString(mb)) +} diff --git a/vendor/github.com/gobwas/glob/compiler/compiler.go b/vendor/github.com/gobwas/glob/compiler/compiler.go new file mode 100644 index 00000000..02e7de80 --- /dev/null +++ b/vendor/github.com/gobwas/glob/compiler/compiler.go @@ -0,0 +1,525 @@ +package compiler + +// TODO use constructor with all matchers, and to their structs private +// TODO glue multiple Text nodes (like after QuoteMeta) + +import ( + "fmt" + "reflect" + + "github.com/gobwas/glob/match" + "github.com/gobwas/glob/syntax/ast" + "github.com/gobwas/glob/util/runes" +) + +func optimizeMatcher(matcher match.Matcher) match.Matcher { + switch m := matcher.(type) { + + case match.Any: + if len(m.Separators) == 0 { + return match.NewSuper() + } + + case match.AnyOf: + if len(m.Matchers) == 1 { + return m.Matchers[0] + } + + return m + + case match.List: + if m.Not == false && len(m.List) == 1 { + return match.NewText(string(m.List)) + } + + return m + + case match.BTree: + m.Left = optimizeMatcher(m.Left) + m.Right = optimizeMatcher(m.Right) + + r, ok := m.Value.(match.Text) + if !ok { + return m + } + + var ( + leftNil = m.Left == nil + rightNil = m.Right == nil + ) + if leftNil && rightNil { + return match.NewText(r.Str) + } + + _, leftSuper := m.Left.(match.Super) + lp, leftPrefix := m.Left.(match.Prefix) + la, leftAny := m.Left.(match.Any) + + _, rightSuper := m.Right.(match.Super) + rs, rightSuffix := m.Right.(match.Suffix) + ra, rightAny := m.Right.(match.Any) + + switch { + case leftSuper && rightSuper: + return match.NewContains(r.Str, false) + + case leftSuper && rightNil: + return match.NewSuffix(r.Str) + + case rightSuper && leftNil: + return match.NewPrefix(r.Str) + + case leftNil && rightSuffix: + return match.NewPrefixSuffix(r.Str, rs.Suffix) + + case rightNil && leftPrefix: + return match.NewPrefixSuffix(lp.Prefix, r.Str) + + case rightNil && leftAny: + return match.NewSuffixAny(r.Str, la.Separators) + + case leftNil && rightAny: + return match.NewPrefixAny(r.Str, ra.Separators) + } + + return m + } + + return matcher +} + +func compileMatchers(matchers []match.Matcher) (match.Matcher, error) { + if len(matchers) == 0 { + return nil, fmt.Errorf("compile error: need at least one matcher") + } + if len(matchers) == 1 { + return matchers[0], nil + } + if m := glueMatchers(matchers); m != nil { + return m, nil + } + + idx := -1 + maxLen := -1 + var val match.Matcher + for i, matcher := range matchers { + if l := matcher.Len(); l != -1 && l >= maxLen { + maxLen = l + idx = i + val = matcher + } + } + + if val == nil { // not found matcher with static length + r, err := compileMatchers(matchers[1:]) + if err != nil { + return nil, err + } + return match.NewBTree(matchers[0], nil, r), nil + } + + left := matchers[:idx] + var right []match.Matcher + if len(matchers) > idx+1 { + right = matchers[idx+1:] + } + + var l, r match.Matcher + var err error + if len(left) > 0 { + l, err = compileMatchers(left) + if err != nil { + return nil, err + } + } + + if len(right) > 0 { + r, err = compileMatchers(right) + if err != nil { + return nil, err + } + } + + return match.NewBTree(val, l, r), nil +} + +func glueMatchers(matchers []match.Matcher) match.Matcher { + if m := glueMatchersAsEvery(matchers); m != nil { + return m + } + if m := glueMatchersAsRow(matchers); m != nil { + return m + } + return nil +} + +func 
glueMatchersAsRow(matchers []match.Matcher) match.Matcher { + if len(matchers) <= 1 { + return nil + } + + var ( + c []match.Matcher + l int + ) + for _, matcher := range matchers { + if ml := matcher.Len(); ml == -1 { + return nil + } else { + c = append(c, matcher) + l += ml + } + } + return match.NewRow(l, c...) +} + +func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher { + if len(matchers) <= 1 { + return nil + } + + var ( + hasAny bool + hasSuper bool + hasSingle bool + min int + separator []rune + ) + + for i, matcher := range matchers { + var sep []rune + + switch m := matcher.(type) { + case match.Super: + sep = []rune{} + hasSuper = true + + case match.Any: + sep = m.Separators + hasAny = true + + case match.Single: + sep = m.Separators + hasSingle = true + min++ + + case match.List: + if !m.Not { + return nil + } + sep = m.List + hasSingle = true + min++ + + default: + return nil + } + + // initialize + if i == 0 { + separator = sep + } + + if runes.Equal(sep, separator) { + continue + } + + return nil + } + + if hasSuper && !hasAny && !hasSingle { + return match.NewSuper() + } + + if hasAny && !hasSuper && !hasSingle { + return match.NewAny(separator) + } + + if (hasAny || hasSuper) && min > 0 && len(separator) == 0 { + return match.NewMin(min) + } + + every := match.NewEveryOf() + + if min > 0 { + every.Add(match.NewMin(min)) + + if !hasAny && !hasSuper { + every.Add(match.NewMax(min)) + } + } + + if len(separator) > 0 { + every.Add(match.NewContains(string(separator), true)) + } + + return every +} + +func minimizeMatchers(matchers []match.Matcher) []match.Matcher { + var done match.Matcher + var left, right, count int + + for l := 0; l < len(matchers); l++ { + for r := len(matchers); r > l; r-- { + if glued := glueMatchers(matchers[l:r]); glued != nil { + var swap bool + + if done == nil { + swap = true + } else { + cl, gl := done.Len(), glued.Len() + swap = cl > -1 && gl > -1 && gl > cl + swap = swap || count < r-l + } + + if swap { + done = glued + left = l + right = r + count = r - l + } + } + } + } + + if done == nil { + return matchers + } + + next := append(append([]match.Matcher{}, matchers[:left]...), done) + if right < len(matchers) { + next = append(next, matchers[right:]...) 
+ } + + if len(next) == len(matchers) { + return next + } + + return minimizeMatchers(next) +} + +// minimizeAnyOf tries to apply some heuristics to minimize number of nodes in given tree +func minimizeTree(tree *ast.Node) *ast.Node { + switch tree.Kind { + case ast.KindAnyOf: + return minimizeTreeAnyOf(tree) + default: + return nil + } +} + +// minimizeAnyOf tries to find common children of given node of AnyOf pattern +// it searches for common children from left and from right +// if any common children are found – then it returns new optimized ast tree +// else it returns nil +func minimizeTreeAnyOf(tree *ast.Node) *ast.Node { + if !areOfSameKind(tree.Children, ast.KindPattern) { + return nil + } + + commonLeft, commonRight := commonChildren(tree.Children) + commonLeftCount, commonRightCount := len(commonLeft), len(commonRight) + if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts + return nil + } + + var result []*ast.Node + if commonLeftCount > 0 { + result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...)) + } + + var anyOf []*ast.Node + for _, child := range tree.Children { + reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount] + var node *ast.Node + if len(reuse) == 0 { + // this pattern is completely reduced by commonLeft and commonRight patterns + // so it become nothing + node = ast.NewNode(ast.KindNothing, nil) + } else { + node = ast.NewNode(ast.KindPattern, nil, reuse...) + } + anyOf = appendIfUnique(anyOf, node) + } + switch { + case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing: + result = append(result, anyOf[0]) + case len(anyOf) > 1: + result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...)) + } + + if commonRightCount > 0 { + result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...)) + } + + return ast.NewNode(ast.KindPattern, nil, result...) 
+} + +func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) { + if len(nodes) <= 1 { + return + } + + // find node that has least number of children + idx := leastChildren(nodes) + if idx == -1 { + return + } + tree := nodes[idx] + treeLength := len(tree.Children) + + // allocate max able size for rightCommon slice + // to get ability insert elements in reverse order (from end to start) + // without sorting + commonRight = make([]*ast.Node, treeLength) + lastRight := treeLength // will use this to get results as commonRight[lastRight:] + + var ( + breakLeft bool + breakRight bool + commonTotal int + ) + for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 { + treeLeft := tree.Children[i] + treeRight := tree.Children[j] + + for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ { + // skip least children node + if k == idx { + continue + } + + restLeft := nodes[k].Children[i] + restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength] + + breakLeft = breakLeft || !treeLeft.Equal(restLeft) + + // disable searching for right common parts, if left part is already overlapping + breakRight = breakRight || (!breakLeft && j <= i) + breakRight = breakRight || !treeRight.Equal(restRight) + } + + if !breakLeft { + commonTotal++ + commonLeft = append(commonLeft, treeLeft) + } + if !breakRight { + commonTotal++ + lastRight = j + commonRight[j] = treeRight + } + } + + commonRight = commonRight[lastRight:] + + return +} + +func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node { + for _, n := range target { + if reflect.DeepEqual(n, val) { + return target + } + } + return append(target, val) +} + +func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool { + for _, n := range nodes { + if n.Kind != kind { + return false + } + } + return true +} + +func leastChildren(nodes []*ast.Node) int { + min := -1 + idx := -1 + for i, n := range nodes { + if idx == -1 || (len(n.Children) < min) { + min = len(n.Children) + idx = i + } + } + return idx +} + +func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) { + var matchers []match.Matcher + for _, desc := range tree.Children { + m, err := compile(desc, sep) + if err != nil { + return nil, err + } + matchers = append(matchers, optimizeMatcher(m)) + } + return matchers, nil +} + +func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) { + switch tree.Kind { + case ast.KindAnyOf: + // todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go) + if n := minimizeTree(tree); n != nil { + return compile(n, sep) + } + matchers, err := compileTreeChildren(tree, sep) + if err != nil { + return nil, err + } + return match.NewAnyOf(matchers...), nil + + case ast.KindPattern: + if len(tree.Children) == 0 { + return match.NewNothing(), nil + } + matchers, err := compileTreeChildren(tree, sep) + if err != nil { + return nil, err + } + m, err = compileMatchers(minimizeMatchers(matchers)) + if err != nil { + return nil, err + } + + case ast.KindAny: + m = match.NewAny(sep) + + case ast.KindSuper: + m = match.NewSuper() + + case ast.KindSingle: + m = match.NewSingle(sep) + + case ast.KindNothing: + m = match.NewNothing() + + case ast.KindList: + l := tree.Value.(ast.List) + m = match.NewList([]rune(l.Chars), l.Not) + + case ast.KindRange: + r := tree.Value.(ast.Range) + m = match.NewRange(r.Lo, r.Hi, r.Not) + + case ast.KindText: + t := tree.Value.(ast.Text) + m = match.NewText(t.Text) + + default: + return nil, 
fmt.Errorf("could not compile tree: unknown node type") + } + + return optimizeMatcher(m), nil +} + +func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) { + m, err := compile(tree, sep) + if err != nil { + return nil, err + } + + return m, nil +} diff --git a/vendor/github.com/gobwas/glob/compiler/compiler_test.go b/vendor/github.com/gobwas/glob/compiler/compiler_test.go new file mode 100644 index 00000000..b58b1ebc --- /dev/null +++ b/vendor/github.com/gobwas/glob/compiler/compiler_test.go @@ -0,0 +1,624 @@ +package compiler + +import ( + "github.com/gobwas/glob/match" + "github.com/gobwas/glob/match/debug" + "github.com/gobwas/glob/syntax/ast" + "reflect" + "testing" +) + +var separators = []rune{'.'} + +func TestCommonChildren(t *testing.T) { + for i, test := range []struct { + nodes []*ast.Node + left []*ast.Node + right []*ast.Node + }{ + { + nodes: []*ast.Node{ + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"z"}), + ast.NewNode(ast.KindText, ast.Text{"c"}), + ), + }, + }, + { + nodes: []*ast.Node{ + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"z"}), + ast.NewNode(ast.KindText, ast.Text{"c"}), + ), + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"b"}), + ast.NewNode(ast.KindText, ast.Text{"c"}), + ), + }, + left: []*ast.Node{ + ast.NewNode(ast.KindText, ast.Text{"a"}), + }, + right: []*ast.Node{ + ast.NewNode(ast.KindText, ast.Text{"c"}), + }, + }, + { + nodes: []*ast.Node{ + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"b"}), + ast.NewNode(ast.KindText, ast.Text{"c"}), + ast.NewNode(ast.KindText, ast.Text{"d"}), + ), + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"b"}), + ast.NewNode(ast.KindText, ast.Text{"c"}), + ast.NewNode(ast.KindText, ast.Text{"c"}), + ast.NewNode(ast.KindText, ast.Text{"d"}), + ), + }, + left: []*ast.Node{ + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"b"}), + }, + right: []*ast.Node{ + ast.NewNode(ast.KindText, ast.Text{"c"}), + ast.NewNode(ast.KindText, ast.Text{"d"}), + }, + }, + { + nodes: []*ast.Node{ + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"b"}), + ast.NewNode(ast.KindText, ast.Text{"c"}), + ), + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"b"}), + ast.NewNode(ast.KindText, ast.Text{"b"}), + ast.NewNode(ast.KindText, ast.Text{"c"}), + ), + }, + left: []*ast.Node{ + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"b"}), + }, + right: []*ast.Node{ + ast.NewNode(ast.KindText, ast.Text{"c"}), + }, + }, + { + nodes: []*ast.Node{ + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"d"}), + ), + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"d"}), + ), + ast.NewNode(ast.KindNothing, nil, + ast.NewNode(ast.KindText, ast.Text{"a"}), + ast.NewNode(ast.KindText, ast.Text{"e"}), + ), + }, + left: []*ast.Node{ + ast.NewNode(ast.KindText, ast.Text{"a"}), + }, + right: []*ast.Node{}, + }, + } { + left, right := commonChildren(test.nodes) + if !nodesEqual(left, test.left) { 
+ t.Errorf("[%d] left, right := commonChildren(); left = %v; want %v", i, left, test.left) + } + if !nodesEqual(right, test.right) { + t.Errorf("[%d] left, right := commonChildren(); right = %v; want %v", i, right, test.right) + } + } +} + +func nodesEqual(a, b []*ast.Node) bool { + if len(a) != len(b) { + return false + } + for i, av := range a { + if !av.Equal(b[i]) { + return false + } + } + return true +} + +func TestGlueMatchers(t *testing.T) { + for id, test := range []struct { + in []match.Matcher + exp match.Matcher + }{ + { + []match.Matcher{ + match.NewSuper(), + match.NewSingle(nil), + }, + match.NewMin(1), + }, + { + []match.Matcher{ + match.NewAny(separators), + match.NewSingle(separators), + }, + match.EveryOf{match.Matchers{ + match.NewMin(1), + match.NewContains(string(separators), true), + }}, + }, + { + []match.Matcher{ + match.NewSingle(nil), + match.NewSingle(nil), + match.NewSingle(nil), + }, + match.EveryOf{match.Matchers{ + match.NewMin(3), + match.NewMax(3), + }}, + }, + { + []match.Matcher{ + match.NewList([]rune{'a'}, true), + match.NewAny([]rune{'a'}), + }, + match.EveryOf{match.Matchers{ + match.NewMin(1), + match.NewContains("a", true), + }}, + }, + } { + act, err := compileMatchers(test.in) + if err != nil { + t.Errorf("#%d convert matchers error: %s", id, err) + continue + } + + if !reflect.DeepEqual(act, test.exp) { + t.Errorf("#%d unexpected convert matchers result:\nact: %#v;\nexp: %#v", id, act, test.exp) + continue + } + } +} + +func TestCompileMatchers(t *testing.T) { + for id, test := range []struct { + in []match.Matcher + exp match.Matcher + }{ + { + []match.Matcher{ + match.NewSuper(), + match.NewSingle(separators), + match.NewText("c"), + }, + match.NewBTree( + match.NewText("c"), + match.NewBTree( + match.NewSingle(separators), + match.NewSuper(), + nil, + ), + nil, + ), + }, + { + []match.Matcher{ + match.NewAny(nil), + match.NewText("c"), + match.NewAny(nil), + }, + match.NewBTree( + match.NewText("c"), + match.NewAny(nil), + match.NewAny(nil), + ), + }, + { + []match.Matcher{ + match.NewRange('a', 'c', true), + match.NewList([]rune{'z', 't', 'e'}, false), + match.NewText("c"), + match.NewSingle(nil), + }, + match.NewRow( + 4, + match.Matchers{ + match.NewRange('a', 'c', true), + match.NewList([]rune{'z', 't', 'e'}, false), + match.NewText("c"), + match.NewSingle(nil), + }..., + ), + }, + } { + act, err := compileMatchers(test.in) + if err != nil { + t.Errorf("#%d convert matchers error: %s", id, err) + continue + } + + if !reflect.DeepEqual(act, test.exp) { + t.Errorf("#%d unexpected convert matchers result:\nact: %#v\nexp: %#v", id, act, test.exp) + continue + } + } +} + +func TestConvertMatchers(t *testing.T) { + for id, test := range []struct { + in, exp []match.Matcher + }{ + { + []match.Matcher{ + match.NewRange('a', 'c', true), + match.NewList([]rune{'z', 't', 'e'}, false), + match.NewText("c"), + match.NewSingle(nil), + match.NewAny(nil), + }, + []match.Matcher{ + match.NewRow( + 4, + []match.Matcher{ + match.NewRange('a', 'c', true), + match.NewList([]rune{'z', 't', 'e'}, false), + match.NewText("c"), + match.NewSingle(nil), + }..., + ), + match.NewAny(nil), + }, + }, + { + []match.Matcher{ + match.NewRange('a', 'c', true), + match.NewList([]rune{'z', 't', 'e'}, false), + match.NewText("c"), + match.NewSingle(nil), + match.NewAny(nil), + match.NewSingle(nil), + match.NewSingle(nil), + match.NewAny(nil), + }, + []match.Matcher{ + match.NewRow( + 3, + match.Matchers{ + match.NewRange('a', 'c', true), + match.NewList([]rune{'z', 't', 'e'}, 
false), + match.NewText("c"), + }..., + ), + match.NewMin(3), + }, + }, + } { + act := minimizeMatchers(test.in) + if !reflect.DeepEqual(act, test.exp) { + t.Errorf("#%d unexpected convert matchers 2 result:\nact: %#v\nexp: %#v", id, act, test.exp) + continue + } + } +} + +func TestCompiler(t *testing.T) { + for id, test := range []struct { + ast *ast.Node + result match.Matcher + sep []rune + }{ + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ), + result: match.NewText("abc"), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAny, nil), + ), + sep: separators, + result: match.NewAny(separators), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAny, nil), + ), + result: match.NewSuper(), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindSuper, nil), + ), + result: match.NewSuper(), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindSingle, nil), + ), + sep: separators, + result: match.NewSingle(separators), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindRange, ast.Range{ + Lo: 'a', + Hi: 'z', + Not: true, + }), + ), + result: match.NewRange('a', 'z', true), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindList, ast.List{ + Chars: "abc", + Not: true, + }), + ), + result: match.NewList([]rune{'a', 'b', 'c'}, true), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindSingle, nil), + ast.NewNode(ast.KindSingle, nil), + ast.NewNode(ast.KindSingle, nil), + ), + sep: separators, + result: match.EveryOf{Matchers: match.Matchers{ + match.NewMin(3), + match.NewContains(string(separators), true), + }}, + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindSingle, nil), + ast.NewNode(ast.KindSingle, nil), + ast.NewNode(ast.KindSingle, nil), + ), + result: match.NewMin(3), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindSingle, nil), + ), + sep: separators, + result: match.NewBTree( + match.NewRow( + 4, + match.Matchers{ + match.NewText("abc"), + match.NewSingle(separators), + }..., + ), + match.NewAny(separators), + nil, + ), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"/"}), + ast.NewNode(ast.KindAnyOf, nil, + ast.NewNode(ast.KindText, ast.Text{"z"}), + ast.NewNode(ast.KindText, ast.Text{"ab"}), + ), + ast.NewNode(ast.KindSuper, nil), + ), + sep: separators, + result: match.NewBTree( + match.NewText("/"), + nil, + match.NewBTree( + match.NewAnyOf(match.NewText("z"), match.NewText("ab")), + nil, + match.NewSuper(), + ), + ), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindSuper, nil), + ast.NewNode(ast.KindSingle, nil), + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindSingle, nil), + ), + sep: separators, + result: match.NewBTree( + match.NewRow( + 5, + match.Matchers{ + match.NewSingle(separators), + match.NewText("abc"), + match.NewSingle(separators), + }..., + ), + match.NewSuper(), + nil, + ), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ), + result: match.NewSuffix("abc"), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindAny, nil), + ), + result: match.NewPrefix("abc"), + }, + { 
+ ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindText, ast.Text{"def"}), + ), + result: match.NewPrefixSuffix("abc", "def"), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindAny, nil), + ), + result: match.NewContains("abc", false), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindAny, nil), + ast.NewNode(ast.KindAny, nil), + ), + sep: separators, + result: match.NewBTree( + match.NewText("abc"), + match.NewAny(separators), + match.NewAny(separators), + ), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindSuper, nil), + ast.NewNode(ast.KindSingle, nil), + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindSuper, nil), + ast.NewNode(ast.KindSingle, nil), + ), + result: match.NewBTree( + match.NewText("abc"), + match.NewMin(1), + match.NewMin(1), + ), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ), + result: match.NewText("abc"), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAnyOf, nil, + ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAnyOf, nil, + ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ), + ), + ), + ), + ), + result: match.NewText("abc"), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAnyOf, nil, + ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindSingle, nil), + ), + ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindList, ast.List{Chars: "def"}), + ), + ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ), + ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ), + ), + ), + result: match.NewBTree( + match.NewText("abc"), + nil, + match.AnyOf{Matchers: match.Matchers{ + match.NewSingle(nil), + match.NewList([]rune{'d', 'e', 'f'}, false), + match.NewNothing(), + }}, + ), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindRange, ast.Range{Lo: 'a', Hi: 'z'}), + ast.NewNode(ast.KindRange, ast.Range{Lo: 'a', Hi: 'x', Not: true}), + ast.NewNode(ast.KindAny, nil), + ), + result: match.NewBTree( + match.NewRow( + 2, + match.Matchers{ + match.NewRange('a', 'z', false), + match.NewRange('a', 'x', true), + }..., + ), + nil, + match.NewSuper(), + ), + }, + { + ast: ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindAnyOf, nil, + ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindList, ast.List{Chars: "abc"}), + ast.NewNode(ast.KindText, ast.Text{"ghi"}), + ), + ast.NewNode(ast.KindPattern, nil, + ast.NewNode(ast.KindText, ast.Text{"abc"}), + ast.NewNode(ast.KindList, ast.List{Chars: "def"}), + ast.NewNode(ast.KindText, ast.Text{"ghi"}), + ), + ), + ), + result: match.NewRow( + 7, + match.Matchers{ + match.NewText("abc"), + match.AnyOf{Matchers: match.Matchers{ + match.NewList([]rune{'a', 'b', 'c'}, false), + match.NewList([]rune{'d', 'e', 'f'}, false), + }}, + match.NewText("ghi"), + }..., + ), + }, + } { + m, err := 
Compile(test.ast, test.sep) + if err != nil { + t.Errorf("compilation error: %s", err) + continue + } + + if !reflect.DeepEqual(m, test.result) { + t.Errorf("[%d] Compile():\nexp: %#v\nact: %#v\n\ngraphviz:\nexp:\n%s\nact:\n%s\n", id, test.result, m, debug.Graphviz("", test.result.(match.Matcher)), debug.Graphviz("", m.(match.Matcher))) + continue + } + } +} diff --git a/vendor/github.com/gobwas/glob/glob.go b/vendor/github.com/gobwas/glob/glob.go new file mode 100644 index 00000000..2afde343 --- /dev/null +++ b/vendor/github.com/gobwas/glob/glob.go @@ -0,0 +1,80 @@ +package glob + +import ( + "github.com/gobwas/glob/compiler" + "github.com/gobwas/glob/syntax" +) + +// Glob represents compiled glob pattern. +type Glob interface { + Match(string) bool +} + +// Compile creates Glob for given pattern and strings (if any present after pattern) as separators. +// The pattern syntax is: +// +// pattern: +// { term } +// +// term: +// `*` matches any sequence of non-separator characters +// `**` matches any sequence of characters +// `?` matches any single non-separator character +// `[` [ `!` ] { character-range } `]` +// character class (must be non-empty) +// `{` pattern-list `}` +// pattern alternatives +// c matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`) +// `\` c matches character c +// +// character-range: +// c matches character c (c != `\\`, `-`, `]`) +// `\` c matches character c +// lo `-` hi matches character c for lo <= c <= hi +// +// pattern-list: +// pattern { `,` pattern } +// comma-separated (without spaces) patterns +// +func Compile(pattern string, separators ...rune) (Glob, error) { + ast, err := syntax.Parse(pattern) + if err != nil { + return nil, err + } + + matcher, err := compiler.Compile(ast, separators) + if err != nil { + return nil, err + } + + return matcher, nil +} + +// MustCompile is the same as Compile, except that if Compile returns error, this will panic +func MustCompile(pattern string, separators ...rune) Glob { + g, err := Compile(pattern, separators...) + if err != nil { + panic(err) + } + + return g +} + +// QuoteMeta returns a string that quotes all glob pattern meta characters +// inside the argument text; For example, QuoteMeta(`{foo*}`) returns `\[foo\*\]`. 
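A short usage sketch of the API documented above, with illustrative inputs only: it shows how separators restrict '*' and '?' while '**' crosses them, and how QuoteMeta escapes metacharacters so user input can be embedded literally (the expected outputs follow the package's own tests).

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// With '.' registered as a separator, '*' stays within one label.
	g := glob.MustCompile("*.google.*", '.')
	fmt.Println(g.Match("www.google.com"))     // true
	fmt.Println(g.Match("api.www.google.com")) // false: '*' cannot cross the '.' separator

	// '**' matches any sequence of characters, including separators.
	gg := glob.MustCompile("**.google.**", '.')
	fmt.Println(gg.Match("api.www.google.com")) // true

	// QuoteMeta escapes all glob metacharacters.
	fmt.Println(glob.QuoteMeta("{foo*}")) // \{foo\*\}
}
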
+func QuoteMeta(s string) string { + b := make([]byte, 2*len(s)) + + // a byte loop is correct because all meta characters are ASCII + j := 0 + for i := 0; i < len(s); i++ { + if syntax.Special(s[i]) { + b[j] = '\\' + j++ + } + b[j] = s[i] + j++ + } + + return string(b[0:j]) +} diff --git a/vendor/github.com/gobwas/glob/glob_test.go b/vendor/github.com/gobwas/glob/glob_test.go new file mode 100644 index 00000000..98934513 --- /dev/null +++ b/vendor/github.com/gobwas/glob/glob_test.go @@ -0,0 +1,527 @@ +package glob + +import ( + "regexp" + "testing" +) + +const ( + pattern_all = "[a-z][!a-x]*cat*[h][!b]*eyes*" + regexp_all = `^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` + fixture_all_match = "my cat has very bright eyes" + fixture_all_mismatch = "my dog has very bright eyes" + + pattern_plain = "google.com" + regexp_plain = `^google\.com$` + fixture_plain_match = "google.com" + fixture_plain_mismatch = "gobwas.com" + + pattern_multiple = "https://*.google.*" + regexp_multiple = `^https:\/\/.*\.google\..*$` + fixture_multiple_match = "https://account.google.com" + fixture_multiple_mismatch = "https://google.com" + + pattern_alternatives = "{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}" + regexp_alternatives = `^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` + fixture_alternatives_match = "http://yahoo.com" + fixture_alternatives_mismatch = "http://google.com" + + pattern_alternatives_suffix = "{https://*gobwas.com,http://exclude.gobwas.com}" + regexp_alternatives_suffix = `^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` + fixture_alternatives_suffix_first_match = "https://safe.gobwas.com" + fixture_alternatives_suffix_first_mismatch = "http://safe.gobwas.com" + fixture_alternatives_suffix_second = "http://exclude.gobwas.com" + + pattern_prefix = "abc*" + regexp_prefix = `^abc.*$` + pattern_suffix = "*def" + regexp_suffix = `^.*def$` + pattern_prefix_suffix = "ab*ef" + regexp_prefix_suffix = `^ab.*ef$` + fixture_prefix_suffix_match = "abcdef" + fixture_prefix_suffix_mismatch = "af" + + pattern_alternatives_combine_lite = "{abc*def,abc?def,abc[zte]def}" + regexp_alternatives_combine_lite = `^(abc.*def|abc.def|abc[zte]def)$` + fixture_alternatives_combine_lite = "abczdef" + + pattern_alternatives_combine_hard = "{abc*[a-c]def,abc?[d-g]def,abc[zte]?def}" + regexp_alternatives_combine_hard = `^(abc.*[a-c]def|abc.[d-g]def|abc[zte].def)$` + fixture_alternatives_combine_hard = "abczqdef" +) + +type test struct { + pattern, match string + should bool + delimiters []rune +} + +func glob(s bool, p, m string, d ...rune) test { + return test{p, m, s, d} +} + +func TestGlob(t *testing.T) { + for _, test := range []test{ + glob(true, "* ?at * eyes", "my cat has very bright eyes"), + + glob(true, "", ""), + glob(false, "", "b"), + + glob(true, "*ä", "åä"), + glob(true, "abc", "abc"), + glob(true, "a*c", "abc"), + glob(true, "a*c", "a12345c"), + glob(true, "a?c", "a1c"), + glob(true, "a.b", "a.b", '.'), + glob(true, "a.*", "a.b", '.'), + glob(true, "a.**", "a.b.c", '.'), + glob(true, "a.?.c", "a.b.c", '.'), + glob(true, "a.?.?", "a.b.c", '.'), + glob(true, "?at", "cat"), + glob(true, "?at", "fat"), + glob(true, "*", "abc"), + glob(true, `\*`, "*"), + glob(true, "**", "a.b.c", '.'), + + glob(false, "?at", "at"), + glob(false, "?at", "fat", 'f'), + glob(false, "a.*", "a.b.c", '.'), + glob(false, "a.?.c", "a.bb.c", '.'), + glob(false, "*", "a.b.c", '.'), + + glob(true, "*test", "this is a test"), + glob(true, "this*", "this is a test"), + glob(true, "*is *", "this is a test"), + glob(true, 
"*is*a*", "this is a test"), + glob(true, "**test**", "this is a test"), + glob(true, "**is**a***test*", "this is a test"), + + glob(false, "*is", "this is a test"), + glob(false, "*no*", "this is a test"), + glob(true, "[!a]*", "this is a test3"), + + glob(true, "*abc", "abcabc"), + glob(true, "**abc", "abcabc"), + glob(true, "???", "abc"), + glob(true, "?*?", "abc"), + glob(true, "?*?", "ac"), + glob(false, "sta", "stagnation"), + glob(true, "sta*", "stagnation"), + glob(false, "sta?", "stagnation"), + glob(false, "sta?n", "stagnation"), + + glob(true, "{abc,def}ghi", "defghi"), + glob(true, "{abc,abcd}a", "abcda"), + glob(true, "{a,ab}{bc,f}", "abc"), + glob(true, "{*,**}{a,b}", "ab"), + glob(false, "{*,**}{a,b}", "ac"), + + glob(true, "/{rate,[a-z][a-z][a-z]}*", "/rate"), + glob(true, "/{rate,[0-9][0-9][0-9]}*", "/rate"), + glob(true, "/{rate,[a-z][a-z][a-z]}*", "/usd"), + + glob(true, "{*.google.*,*.yandex.*}", "www.google.com", '.'), + glob(true, "{*.google.*,*.yandex.*}", "www.yandex.com", '.'), + glob(false, "{*.google.*,*.yandex.*}", "yandex.com", '.'), + glob(false, "{*.google.*,*.yandex.*}", "google.com", '.'), + + glob(true, "{*.google.*,yandex.*}", "www.google.com", '.'), + glob(true, "{*.google.*,yandex.*}", "yandex.com", '.'), + glob(false, "{*.google.*,yandex.*}", "www.yandex.com", '.'), + glob(false, "{*.google.*,yandex.*}", "google.com", '.'), + + glob(true, pattern_all, fixture_all_match), + glob(false, pattern_all, fixture_all_mismatch), + + glob(true, pattern_plain, fixture_plain_match), + glob(false, pattern_plain, fixture_plain_mismatch), + + glob(true, pattern_multiple, fixture_multiple_match), + glob(false, pattern_multiple, fixture_multiple_mismatch), + + glob(true, pattern_alternatives, fixture_alternatives_match), + glob(false, pattern_alternatives, fixture_alternatives_mismatch), + + glob(true, pattern_alternatives_suffix, fixture_alternatives_suffix_first_match), + glob(false, pattern_alternatives_suffix, fixture_alternatives_suffix_first_mismatch), + glob(true, pattern_alternatives_suffix, fixture_alternatives_suffix_second), + + glob(true, pattern_alternatives_combine_hard, fixture_alternatives_combine_hard), + + glob(true, pattern_alternatives_combine_lite, fixture_alternatives_combine_lite), + + glob(true, pattern_prefix, fixture_prefix_suffix_match), + glob(false, pattern_prefix, fixture_prefix_suffix_mismatch), + + glob(true, pattern_suffix, fixture_prefix_suffix_match), + glob(false, pattern_suffix, fixture_prefix_suffix_mismatch), + + glob(true, pattern_prefix_suffix, fixture_prefix_suffix_match), + glob(false, pattern_prefix_suffix, fixture_prefix_suffix_mismatch), + } { + t.Run("", func(t *testing.T) { + g := MustCompile(test.pattern, test.delimiters...) 
+ result := g.Match(test.match) + if result != test.should { + t.Errorf( + "pattern %q matching %q should be %v but got %v\n%s", + test.pattern, test.match, test.should, result, g, + ) + } + }) + } +} + +func TestQuoteMeta(t *testing.T) { + for id, test := range []struct { + in, out string + }{ + { + in: `[foo*]`, + out: `\[foo\*\]`, + }, + { + in: `{foo*}`, + out: `\{foo\*\}`, + }, + { + in: `*?\[]{}`, + out: `\*\?\\\[\]\{\}`, + }, + { + in: `some text and *?\[]{}`, + out: `some text and \*\?\\\[\]\{\}`, + }, + } { + act := QuoteMeta(test.in) + if act != test.out { + t.Errorf("#%d QuoteMeta(%q) = %q; want %q", id, test.in, act, test.out) + } + if _, err := Compile(act); err != nil { + t.Errorf("#%d _, err := Compile(QuoteMeta(%q) = %q); err = %q", id, test.in, act, err) + } + } +} + +func BenchmarkParseGlob(b *testing.B) { + for i := 0; i < b.N; i++ { + Compile(pattern_all) + } +} +func BenchmarkParseRegexp(b *testing.B) { + for i := 0; i < b.N; i++ { + regexp.MustCompile(regexp_all) + } +} + +func BenchmarkAllGlobMatch(b *testing.B) { + m, _ := Compile(pattern_all) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_all_match) + } +} +func BenchmarkAllGlobMatchParallel(b *testing.B) { + m, _ := Compile(pattern_all) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _ = m.Match(fixture_all_match) + } + }) +} + +func BenchmarkAllRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_all) + f := []byte(fixture_all_match) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkAllGlobMismatch(b *testing.B) { + m, _ := Compile(pattern_all) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_all_mismatch) + } +} +func BenchmarkAllGlobMismatchParallel(b *testing.B) { + m, _ := Compile(pattern_all) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _ = m.Match(fixture_all_mismatch) + } + }) +} +func BenchmarkAllRegexpMismatch(b *testing.B) { + m := regexp.MustCompile(regexp_all) + f := []byte(fixture_all_mismatch) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} + +func BenchmarkMultipleGlobMatch(b *testing.B) { + m, _ := Compile(pattern_multiple) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_multiple_match) + } +} +func BenchmarkMultipleRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_multiple) + f := []byte(fixture_multiple_match) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkMultipleGlobMismatch(b *testing.B) { + m, _ := Compile(pattern_multiple) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_multiple_mismatch) + } +} +func BenchmarkMultipleRegexpMismatch(b *testing.B) { + m := regexp.MustCompile(regexp_multiple) + f := []byte(fixture_multiple_mismatch) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} + +func BenchmarkAlternativesGlobMatch(b *testing.B) { + m, _ := Compile(pattern_alternatives) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_alternatives_match) + } +} +func BenchmarkAlternativesGlobMismatch(b *testing.B) { + m, _ := Compile(pattern_alternatives) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_alternatives_mismatch) + } +} +func BenchmarkAlternativesRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_alternatives) + f := []byte(fixture_alternatives_match) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkAlternativesRegexpMismatch(b *testing.B) { + m := regexp.MustCompile(regexp_alternatives) + f := []byte(fixture_alternatives_mismatch) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} + +func 
BenchmarkAlternativesSuffixFirstGlobMatch(b *testing.B) { + m, _ := Compile(pattern_alternatives_suffix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_alternatives_suffix_first_match) + } +} +func BenchmarkAlternativesSuffixFirstGlobMismatch(b *testing.B) { + m, _ := Compile(pattern_alternatives_suffix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_alternatives_suffix_first_mismatch) + } +} +func BenchmarkAlternativesSuffixSecondGlobMatch(b *testing.B) { + m, _ := Compile(pattern_alternatives_suffix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_alternatives_suffix_second) + } +} +func BenchmarkAlternativesCombineLiteGlobMatch(b *testing.B) { + m, _ := Compile(pattern_alternatives_combine_lite) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_alternatives_combine_lite) + } +} +func BenchmarkAlternativesCombineHardGlobMatch(b *testing.B) { + m, _ := Compile(pattern_alternatives_combine_hard) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_alternatives_combine_hard) + } +} +func BenchmarkAlternativesSuffixFirstRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_alternatives_suffix) + f := []byte(fixture_alternatives_suffix_first_match) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkAlternativesSuffixFirstRegexpMismatch(b *testing.B) { + m := regexp.MustCompile(regexp_alternatives_suffix) + f := []byte(fixture_alternatives_suffix_first_mismatch) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkAlternativesSuffixSecondRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_alternatives_suffix) + f := []byte(fixture_alternatives_suffix_second) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkAlternativesCombineLiteRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_alternatives_combine_lite) + f := []byte(fixture_alternatives_combine_lite) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkAlternativesCombineHardRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_alternatives_combine_hard) + f := []byte(fixture_alternatives_combine_hard) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} + +func BenchmarkPlainGlobMatch(b *testing.B) { + m, _ := Compile(pattern_plain) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_plain_match) + } +} +func BenchmarkPlainRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_plain) + f := []byte(fixture_plain_match) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkPlainGlobMismatch(b *testing.B) { + m, _ := Compile(pattern_plain) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_plain_mismatch) + } +} +func BenchmarkPlainRegexpMismatch(b *testing.B) { + m := regexp.MustCompile(regexp_plain) + f := []byte(fixture_plain_mismatch) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} + +func BenchmarkPrefixGlobMatch(b *testing.B) { + m, _ := Compile(pattern_prefix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_prefix_suffix_match) + } +} +func BenchmarkPrefixRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_prefix) + f := []byte(fixture_prefix_suffix_match) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkPrefixGlobMismatch(b *testing.B) { + m, _ := Compile(pattern_prefix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_prefix_suffix_mismatch) + } +} +func BenchmarkPrefixRegexpMismatch(b *testing.B) { + m := regexp.MustCompile(regexp_prefix) + f := []byte(fixture_prefix_suffix_mismatch) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } 
+} + +func BenchmarkSuffixGlobMatch(b *testing.B) { + m, _ := Compile(pattern_suffix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_prefix_suffix_match) + } +} +func BenchmarkSuffixRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_suffix) + f := []byte(fixture_prefix_suffix_match) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkSuffixGlobMismatch(b *testing.B) { + m, _ := Compile(pattern_suffix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_prefix_suffix_mismatch) + } +} +func BenchmarkSuffixRegexpMismatch(b *testing.B) { + m := regexp.MustCompile(regexp_suffix) + f := []byte(fixture_prefix_suffix_mismatch) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} + +func BenchmarkPrefixSuffixGlobMatch(b *testing.B) { + m, _ := Compile(pattern_prefix_suffix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_prefix_suffix_match) + } +} +func BenchmarkPrefixSuffixRegexpMatch(b *testing.B) { + m := regexp.MustCompile(regexp_prefix_suffix) + f := []byte(fixture_prefix_suffix_match) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} +func BenchmarkPrefixSuffixGlobMismatch(b *testing.B) { + m, _ := Compile(pattern_prefix_suffix) + + for i := 0; i < b.N; i++ { + _ = m.Match(fixture_prefix_suffix_mismatch) + } +} +func BenchmarkPrefixSuffixRegexpMismatch(b *testing.B) { + m := regexp.MustCompile(regexp_prefix_suffix) + f := []byte(fixture_prefix_suffix_mismatch) + + for i := 0; i < b.N; i++ { + _ = m.Match(f) + } +} diff --git a/vendor/github.com/gobwas/glob/match/any.go b/vendor/github.com/gobwas/glob/match/any.go new file mode 100644 index 00000000..514a9a5c --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/any.go @@ -0,0 +1,45 @@ +package match + +import ( + "fmt" + "github.com/gobwas/glob/util/strings" +) + +type Any struct { + Separators []rune +} + +func NewAny(s []rune) Any { + return Any{s} +} + +func (self Any) Match(s string) bool { + return strings.IndexAnyRunes(s, self.Separators) == -1 +} + +func (self Any) Index(s string) (int, []int) { + found := strings.IndexAnyRunes(s, self.Separators) + switch found { + case -1: + case 0: + return 0, segments0 + default: + s = s[:found] + } + + segments := acquireSegments(len(s)) + for i := range s { + segments = append(segments, i) + } + segments = append(segments, len(s)) + + return 0, segments +} + +func (self Any) Len() int { + return lenNo +} + +func (self Any) String() string { + return fmt.Sprintf("", string(self.Separators)) +} diff --git a/vendor/github.com/gobwas/glob/match/any_of.go b/vendor/github.com/gobwas/glob/match/any_of.go new file mode 100644 index 00000000..8e65356c --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/any_of.go @@ -0,0 +1,82 @@ +package match + +import "fmt" + +type AnyOf struct { + Matchers Matchers +} + +func NewAnyOf(m ...Matcher) AnyOf { + return AnyOf{Matchers(m)} +} + +func (self *AnyOf) Add(m Matcher) error { + self.Matchers = append(self.Matchers, m) + return nil +} + +func (self AnyOf) Match(s string) bool { + for _, m := range self.Matchers { + if m.Match(s) { + return true + } + } + + return false +} + +func (self AnyOf) Index(s string) (int, []int) { + index := -1 + + segments := acquireSegments(len(s)) + for _, m := range self.Matchers { + idx, seg := m.Index(s) + if idx == -1 { + continue + } + + if index == -1 || idx < index { + index = idx + segments = append(segments[:0], seg...) 
+ continue + } + + if idx > index { + continue + } + + // here idx == index + segments = appendMerge(segments, seg) + } + + if index == -1 { + releaseSegments(segments) + return -1, nil + } + + return index, segments +} + +func (self AnyOf) Len() (l int) { + l = -1 + for _, m := range self.Matchers { + ml := m.Len() + switch { + case l == -1: + l = ml + continue + + case ml == -1: + return -1 + + case l != ml: + return -1 + } + } + + return +} + +func (self AnyOf) String() string { + return fmt.Sprintf("", self.Matchers) +} diff --git a/vendor/github.com/gobwas/glob/match/any_of_test.go b/vendor/github.com/gobwas/glob/match/any_of_test.go new file mode 100644 index 00000000..3b478cf5 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/any_of_test.go @@ -0,0 +1,53 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestAnyOfIndex(t *testing.T) { + for id, test := range []struct { + matchers Matchers + fixture string + index int + segments []int + }{ + { + Matchers{ + NewAny(nil), + NewText("b"), + NewText("c"), + }, + "abc", + 0, + []int{0, 1, 2, 3}, + }, + { + Matchers{ + NewPrefix("b"), + NewSuffix("c"), + }, + "abc", + 0, + []int{3}, + }, + { + Matchers{ + NewList([]rune("[def]"), false), + NewList([]rune("[abc]"), false), + }, + "abcdef", + 0, + []int{1}, + }, + } { + everyOf := NewAnyOf(test.matchers...) + index, segments := everyOf.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} diff --git a/vendor/github.com/gobwas/glob/match/any_test.go b/vendor/github.com/gobwas/glob/match/any_test.go new file mode 100644 index 00000000..358f5534 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/any_test.go @@ -0,0 +1,57 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestAnyIndex(t *testing.T) { + for id, test := range []struct { + sep []rune + fixture string + index int + segments []int + }{ + { + []rune{'.'}, + "abc", + 0, + []int{0, 1, 2, 3}, + }, + { + []rune{'.'}, + "abc.def", + 0, + []int{0, 1, 2, 3}, + }, + } { + p := NewAny(test.sep) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexAny(b *testing.B) { + m := NewAny(bench_separators) + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexAnyParallel(b *testing.B) { + m := NewAny(bench_separators) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/btree.go b/vendor/github.com/gobwas/glob/match/btree.go new file mode 100644 index 00000000..8302bf82 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/btree.go @@ -0,0 +1,185 @@ +package match + +import ( + "fmt" + "unicode/utf8" +) + +type BTree struct { + Value Matcher + Left Matcher + Right Matcher + ValueLengthRunes int + LeftLengthRunes int + RightLengthRunes int + LengthRunes int +} + +func NewBTree(Value, Left, Right Matcher) (tree BTree) { + tree.Value = Value + tree.Left = Left + tree.Right = Right + + lenOk := true + if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes 
== -1 { + lenOk = false + } + + if Left != nil { + if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 { + lenOk = false + } + } + + if Right != nil { + if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 { + lenOk = false + } + } + + if lenOk { + tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes + } else { + tree.LengthRunes = -1 + } + + return tree +} + +func (self BTree) Len() int { + return self.LengthRunes +} + +// todo? +func (self BTree) Index(s string) (index int, segments []int) { + //inputLen := len(s) + //// try to cut unnecessary parts + //// by knowledge of length of right and left part + //offset, limit := self.offsetLimit(inputLen) + //for offset < limit { + // // search for matching part in substring + // vi, segments := self.Value.Index(s[offset:limit]) + // if index == -1 { + // return -1, nil + // } + // if self.Left == nil { + // if index != offset { + // return -1, nil + // } + // } else { + // left := s[:offset+vi] + // i := self.Left.IndexSuffix(left) + // if i == -1 { + // return -1, nil + // } + // index = i + // } + // if self.Right != nil { + // for _, seg := range segments { + // right := s[:offset+vi+seg] + // } + // } + + // l := s[:offset+index] + // var left bool + // if self.Left != nil { + // left = self.Left.Index(l) + // } else { + // left = l == "" + // } + //} + + return -1, nil +} + +func (self BTree) Match(s string) bool { + inputLen := len(s) + // try to cut unnecessary parts + // by knowledge of length of right and left part + offset, limit := self.offsetLimit(inputLen) + + for offset < limit { + // search for matching part in substring + index, segments := self.Value.Index(s[offset:limit]) + if index == -1 { + releaseSegments(segments) + return false + } + + l := s[:offset+index] + var left bool + if self.Left != nil { + left = self.Left.Match(l) + } else { + left = l == "" + } + + if left { + for i := len(segments) - 1; i >= 0; i-- { + length := segments[i] + + var right bool + var r string + // if there is no string for the right branch + if inputLen <= offset+index+length { + r = "" + } else { + r = s[offset+index+length:] + } + + if self.Right != nil { + right = self.Right.Match(r) + } else { + right = r == "" + } + + if right { + releaseSegments(segments) + return true + } + } + } + + _, step := utf8.DecodeRuneInString(s[offset+index:]) + offset += index + step + + releaseSegments(segments) + } + + return false +} + +func (self BTree) offsetLimit(inputLen int) (offset int, limit int) { + // self.Length, self.RLen and self.LLen are values meaning the length of runes for each part + // here we manipulating byte length for better optimizations + // but these checks still works, cause minLen of 1-rune string is 1 byte. 
+ if self.LengthRunes != -1 && self.LengthRunes > inputLen { + return 0, 0 + } + if self.LeftLengthRunes >= 0 { + offset = self.LeftLengthRunes + } + if self.RightLengthRunes >= 0 { + limit = inputLen - self.RightLengthRunes + } else { + limit = inputLen + } + return offset, limit +} + +func (self BTree) String() string { + const n string = "" + var l, r string + if self.Left == nil { + l = n + } else { + l = self.Left.String() + } + if self.Right == nil { + r = n + } else { + r = self.Right.String() + } + + return fmt.Sprintf("%s]>", l, self.Value, r) +} diff --git a/vendor/github.com/gobwas/glob/match/btree_test.go b/vendor/github.com/gobwas/glob/match/btree_test.go new file mode 100644 index 00000000..3bd9ea55 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/btree_test.go @@ -0,0 +1,90 @@ +package match + +import ( + "testing" +) + +func TestBTree(t *testing.T) { + for id, test := range []struct { + tree BTree + str string + exp bool + }{ + { + NewBTree(NewText("abc"), NewSuper(), NewSuper()), + "abc", + true, + }, + { + NewBTree(NewText("a"), NewSingle(nil), NewSingle(nil)), + "aaa", + true, + }, + { + NewBTree(NewText("b"), NewSingle(nil), nil), + "bbb", + false, + }, + { + NewBTree( + NewText("c"), + NewBTree( + NewSingle(nil), + NewSuper(), + nil, + ), + nil, + ), + "abc", + true, + }, + } { + act := test.tree.Match(test.str) + if act != test.exp { + t.Errorf("#%d match %q error: act: %t; exp: %t", id, test.str, act, test.exp) + continue + } + } +} + +type fakeMatcher struct { + len int + name string +} + +func (f *fakeMatcher) Match(string) bool { + return true +} + +var i = 3 + +func (f *fakeMatcher) Index(s string) (int, []int) { + seg := make([]int, 0, i) + for x := 0; x < i; x++ { + seg = append(seg, x) + } + return 0, seg +} +func (f *fakeMatcher) Len() int { + return f.len +} +func (f *fakeMatcher) String() string { + return f.name +} + +func BenchmarkMatchBTree(b *testing.B) { + l := &fakeMatcher{4, "left_fake"} + r := &fakeMatcher{4, "right_fake"} + v := &fakeMatcher{2, "value_fake"} + + // must be <= len(l + r + v) + fixture := "abcdefghijabcdefghijabcdefghijabcdefghijabcdefghijabcdefghijabcdefghijabcdefghij" + + bt := NewBTree(v, l, r) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + bt.Match(fixture) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/contains.go b/vendor/github.com/gobwas/glob/match/contains.go new file mode 100644 index 00000000..0998e95b --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/contains.go @@ -0,0 +1,58 @@ +package match + +import ( + "fmt" + "strings" +) + +type Contains struct { + Needle string + Not bool +} + +func NewContains(needle string, not bool) Contains { + return Contains{needle, not} +} + +func (self Contains) Match(s string) bool { + return strings.Contains(s, self.Needle) != self.Not +} + +func (self Contains) Index(s string) (int, []int) { + var offset int + + idx := strings.Index(s, self.Needle) + + if !self.Not { + if idx == -1 { + return -1, nil + } + + offset = idx + len(self.Needle) + if len(s) <= offset { + return 0, []int{offset} + } + s = s[offset:] + } else if idx != -1 { + s = s[:idx] + } + + segments := acquireSegments(len(s) + 1) + for i := range s { + segments = append(segments, offset+i) + } + + return 0, append(segments, offset+len(s)) +} + +func (self Contains) Len() int { + return lenNo +} + +func (self Contains) String() string { + var not string + if self.Not { + not = "!" 
+ } + return fmt.Sprintf("", not, self.Needle) +} diff --git a/vendor/github.com/gobwas/glob/match/contains_test.go b/vendor/github.com/gobwas/glob/match/contains_test.go new file mode 100644 index 00000000..931322eb --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/contains_test.go @@ -0,0 +1,74 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestContainsIndex(t *testing.T) { + for id, test := range []struct { + prefix string + not bool + fixture string + index int + segments []int + }{ + { + "ab", + false, + "abc", + 0, + []int{2, 3}, + }, + { + "ab", + false, + "fffabfff", + 0, + []int{5, 6, 7, 8}, + }, + { + "ab", + true, + "abc", + 0, + []int{0}, + }, + { + "ab", + true, + "fffabfff", + 0, + []int{0, 1, 2, 3}, + }, + } { + p := NewContains(test.prefix, test.not) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexContains(b *testing.B) { + m := NewContains(string(bench_separators), true) + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexContainsParallel(b *testing.B) { + m := NewContains(string(bench_separators), true) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/debug/debug.go b/vendor/github.com/gobwas/glob/match/debug/debug.go new file mode 100644 index 00000000..5c5dbc1a --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/debug/debug.go @@ -0,0 +1,55 @@ +package debug + +import ( + "bytes" + "fmt" + "github.com/gobwas/glob/match" + "math/rand" +) + +func Graphviz(pattern string, m match.Matcher) string { + return fmt.Sprintf(`digraph G {graph[label="%s"];%s}`, pattern, graphviz_internal(m, fmt.Sprintf("%x", rand.Int63()))) +} + +func graphviz_internal(m match.Matcher, id string) string { + buf := &bytes.Buffer{} + + switch matcher := m.(type) { + case match.BTree: + fmt.Fprintf(buf, `"%s"[label="%s"];`, id, matcher.Value.String()) + for _, m := range []match.Matcher{matcher.Left, matcher.Right} { + switch n := m.(type) { + case nil: + rnd := rand.Int63() + fmt.Fprintf(buf, `"%x"[label=""];`, rnd) + fmt.Fprintf(buf, `"%s"->"%x";`, id, rnd) + + default: + sub := fmt.Sprintf("%x", rand.Int63()) + fmt.Fprintf(buf, `"%s"->"%s";`, id, sub) + fmt.Fprintf(buf, graphviz_internal(n, sub)) + } + } + + case match.AnyOf: + fmt.Fprintf(buf, `"%s"[label="AnyOf"];`, id) + for _, m := range matcher.Matchers { + rnd := rand.Int63() + fmt.Fprintf(buf, graphviz_internal(m, fmt.Sprintf("%x", rnd))) + fmt.Fprintf(buf, `"%s"->"%x";`, id, rnd) + } + + case match.EveryOf: + fmt.Fprintf(buf, `"%s"[label="EveryOf"];`, id) + for _, m := range matcher.Matchers { + rnd := rand.Int63() + fmt.Fprintf(buf, graphviz_internal(m, fmt.Sprintf("%x", rnd))) + fmt.Fprintf(buf, `"%s"->"%x";`, id, rnd) + } + + default: + fmt.Fprintf(buf, `"%s"[label="%s"];`, id, m.String()) + } + + return buf.String() +} diff --git a/vendor/github.com/gobwas/glob/match/every_of.go b/vendor/github.com/gobwas/glob/match/every_of.go new file mode 100644 index 00000000..7c968ee3 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/every_of.go @@ -0,0 +1,99 @@ +package match + +import ( + "fmt" +) + +type EveryOf struct { + Matchers Matchers +} + +func 
NewEveryOf(m ...Matcher) EveryOf { + return EveryOf{Matchers(m)} +} + +func (self *EveryOf) Add(m Matcher) error { + self.Matchers = append(self.Matchers, m) + return nil +} + +func (self EveryOf) Len() (l int) { + for _, m := range self.Matchers { + if ml := m.Len(); l > 0 { + l += ml + } else { + return -1 + } + } + + return +} + +func (self EveryOf) Index(s string) (int, []int) { + var index int + var offset int + + // make `in` with cap as len(s), + // cause it is the maximum size of output segments values + next := acquireSegments(len(s)) + current := acquireSegments(len(s)) + + sub := s + for i, m := range self.Matchers { + idx, seg := m.Index(sub) + if idx == -1 { + releaseSegments(next) + releaseSegments(current) + return -1, nil + } + + if i == 0 { + // we use copy here instead of `current = seg` + // cause seg is a slice from reusable buffer `in` + // and it could be overwritten in next iteration + current = append(current, seg...) + } else { + // clear the next + next = next[:0] + + delta := index - (idx + offset) + for _, ex := range current { + for _, n := range seg { + if ex+delta == n { + next = append(next, n) + } + } + } + + if len(next) == 0 { + releaseSegments(next) + releaseSegments(current) + return -1, nil + } + + current = append(current[:0], next...) + } + + index = idx + offset + sub = s[index:] + offset += idx + } + + releaseSegments(next) + + return index, current +} + +func (self EveryOf) Match(s string) bool { + for _, m := range self.Matchers { + if !m.Match(s) { + return false + } + } + + return true +} + +func (self EveryOf) String() string { + return fmt.Sprintf("", self.Matchers) +} diff --git a/vendor/github.com/gobwas/glob/match/every_of_test.go b/vendor/github.com/gobwas/glob/match/every_of_test.go new file mode 100644 index 00000000..eb83f862 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/every_of_test.go @@ -0,0 +1,45 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestEveryOfIndex(t *testing.T) { + for id, test := range []struct { + matchers Matchers + fixture string + index int + segments []int + }{ + { + Matchers{ + NewAny(nil), + NewText("b"), + NewText("c"), + }, + "dbc", + -1, + nil, + }, + { + Matchers{ + NewAny(nil), + NewPrefix("b"), + NewSuffix("c"), + }, + "abc", + 1, + []int{2}, + }, + } { + everyOf := NewEveryOf(test.matchers...) 
+ index, segments := everyOf.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} diff --git a/vendor/github.com/gobwas/glob/match/list.go b/vendor/github.com/gobwas/glob/match/list.go new file mode 100644 index 00000000..7fd763ec --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/list.go @@ -0,0 +1,49 @@ +package match + +import ( + "fmt" + "github.com/gobwas/glob/util/runes" + "unicode/utf8" +) + +type List struct { + List []rune + Not bool +} + +func NewList(list []rune, not bool) List { + return List{list, not} +} + +func (self List) Match(s string) bool { + r, w := utf8.DecodeRuneInString(s) + if len(s) > w { + return false + } + + inList := runes.IndexRune(self.List, r) != -1 + return inList == !self.Not +} + +func (self List) Len() int { + return lenOne +} + +func (self List) Index(s string) (int, []int) { + for i, r := range s { + if self.Not == (runes.IndexRune(self.List, r) == -1) { + return i, segmentsByRuneLength[utf8.RuneLen(r)] + } + } + + return -1, nil +} + +func (self List) String() string { + var not string + if self.Not { + not = "!" + } + + return fmt.Sprintf("", not, string(self.List)) +} diff --git a/vendor/github.com/gobwas/glob/match/list_test.go b/vendor/github.com/gobwas/glob/match/list_test.go new file mode 100644 index 00000000..10a54379 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/list_test.go @@ -0,0 +1,58 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestListIndex(t *testing.T) { + for id, test := range []struct { + list []rune + not bool + fixture string + index int + segments []int + }{ + { + []rune("ab"), + false, + "abc", + 0, + []int{1}, + }, + { + []rune("ab"), + true, + "fffabfff", + 0, + []int{1}, + }, + } { + p := NewList(test.list, test.not) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexList(b *testing.B) { + m := NewList([]rune("def"), false) + + for i := 0; i < b.N; i++ { + m.Index(bench_pattern) + } +} + +func BenchmarkIndexListParallel(b *testing.B) { + m := NewList([]rune("def"), false) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + m.Index(bench_pattern) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/match.go b/vendor/github.com/gobwas/glob/match/match.go new file mode 100644 index 00000000..f80e007f --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/match.go @@ -0,0 +1,81 @@ +package match + +// todo common table of rune's length + +import ( + "fmt" + "strings" +) + +const lenOne = 1 +const lenZero = 0 +const lenNo = -1 + +type Matcher interface { + Match(string) bool + Index(string) (int, []int) + Len() int + String() string +} + +type Matchers []Matcher + +func (m Matchers) String() string { + var s []string + for _, matcher := range m { + s = append(s, fmt.Sprint(matcher)) + } + + return fmt.Sprintf("%s", strings.Join(s, ",")) +} + +// appendMerge merges and sorts given already SORTED and UNIQUE segments. 
+func appendMerge(target, sub []int) []int { + lt, ls := len(target), len(sub) + out := make([]int, 0, lt+ls) + + for x, y := 0, 0; x < lt || y < ls; { + if x >= lt { + out = append(out, sub[y:]...) + break + } + + if y >= ls { + out = append(out, target[x:]...) + break + } + + xValue := target[x] + yValue := sub[y] + + switch { + + case xValue == yValue: + out = append(out, xValue) + x++ + y++ + + case xValue < yValue: + out = append(out, xValue) + x++ + + case yValue < xValue: + out = append(out, yValue) + y++ + + } + } + + target = append(target[:0], out...) + + return target +} + +func reverseSegments(input []int) { + l := len(input) + m := l / 2 + + for i := 0; i < m; i++ { + input[i], input[l-i-1] = input[l-i-1], input[i] + } +} diff --git a/vendor/github.com/gobwas/glob/match/match_test.go b/vendor/github.com/gobwas/glob/match/match_test.go new file mode 100644 index 00000000..4c1b83c7 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/match_test.go @@ -0,0 +1,90 @@ +package match + +import ( + "reflect" + "testing" + "unicode/utf8" +) + +var bench_separators = []rune{'.'} + +const bench_pattern = "abcdefghijklmnopqrstuvwxyz0123456789" + +func TestAppendMerge(t *testing.T) { + for id, test := range []struct { + segments [2][]int + exp []int + }{ + { + [2][]int{ + {0, 6, 7}, + {0, 1, 3}, + }, + []int{0, 1, 3, 6, 7}, + }, + { + [2][]int{ + {0, 1, 3, 6, 7}, + {0, 1, 10}, + }, + []int{0, 1, 3, 6, 7, 10}, + }, + } { + act := appendMerge(test.segments[0], test.segments[1]) + if !reflect.DeepEqual(act, test.exp) { + t.Errorf("#%d merge sort segments unexpected:\nact: %v\nexp:%v", id, act, test.exp) + continue + } + } +} + +func BenchmarkAppendMerge(b *testing.B) { + s1 := []int{0, 1, 3, 6, 7} + s2 := []int{0, 1, 3} + + for i := 0; i < b.N; i++ { + appendMerge(s1, s2) + } +} + +func BenchmarkAppendMergeParallel(b *testing.B) { + s1 := []int{0, 1, 3, 6, 7} + s2 := []int{0, 1, 3} + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + appendMerge(s1, s2) + } + }) +} + +func BenchmarkReverse(b *testing.B) { + for i := 0; i < b.N; i++ { + reverseSegments([]int{1, 2, 3, 4}) + } +} + +func getTable() []int { + table := make([]int, utf8.MaxRune+1) + for i := 0; i <= utf8.MaxRune; i++ { + table[i] = utf8.RuneLen(rune(i)) + } + + return table +} + +var table = getTable() + +const runeToLen = 'q' + +func BenchmarkRuneLenFromTable(b *testing.B) { + for i := 0; i < b.N; i++ { + _ = table[runeToLen] + } +} + +func BenchmarkRuneLenFromUTF8(b *testing.B) { + for i := 0; i < b.N; i++ { + _ = utf8.RuneLen(runeToLen) + } +} diff --git a/vendor/github.com/gobwas/glob/match/max.go b/vendor/github.com/gobwas/glob/match/max.go new file mode 100644 index 00000000..d72f69ef --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/max.go @@ -0,0 +1,49 @@ +package match + +import ( + "fmt" + "unicode/utf8" +) + +type Max struct { + Limit int +} + +func NewMax(l int) Max { + return Max{l} +} + +func (self Max) Match(s string) bool { + var l int + for range s { + l += 1 + if l > self.Limit { + return false + } + } + + return true +} + +func (self Max) Index(s string) (int, []int) { + segments := acquireSegments(self.Limit + 1) + segments = append(segments, 0) + var count int + for i, r := range s { + count++ + if count > self.Limit { + break + } + segments = append(segments, i+utf8.RuneLen(r)) + } + + return 0, segments +} + +func (self Max) Len() int { + return lenNo +} + +func (self Max) String() string { + return fmt.Sprintf("", self.Limit) +} diff --git a/vendor/github.com/gobwas/glob/match/max_test.go 
b/vendor/github.com/gobwas/glob/match/max_test.go new file mode 100644 index 00000000..23676284 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/max_test.go @@ -0,0 +1,57 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestMaxIndex(t *testing.T) { + for id, test := range []struct { + limit int + fixture string + index int + segments []int + }{ + { + 3, + "abc", + 0, + []int{0, 1, 2, 3}, + }, + { + 3, + "abcdef", + 0, + []int{0, 1, 2, 3}, + }, + } { + p := NewMax(test.limit) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexMax(b *testing.B) { + m := NewMax(10) + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexMaxParallel(b *testing.B) { + m := NewMax(10) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/min.go b/vendor/github.com/gobwas/glob/match/min.go new file mode 100644 index 00000000..db57ac8e --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/min.go @@ -0,0 +1,57 @@ +package match + +import ( + "fmt" + "unicode/utf8" +) + +type Min struct { + Limit int +} + +func NewMin(l int) Min { + return Min{l} +} + +func (self Min) Match(s string) bool { + var l int + for range s { + l += 1 + if l >= self.Limit { + return true + } + } + + return false +} + +func (self Min) Index(s string) (int, []int) { + var count int + + c := len(s) - self.Limit + 1 + if c <= 0 { + return -1, nil + } + + segments := acquireSegments(c) + for i, r := range s { + count++ + if count >= self.Limit { + segments = append(segments, i+utf8.RuneLen(r)) + } + } + + if len(segments) == 0 { + return -1, nil + } + + return 0, segments +} + +func (self Min) Len() int { + return lenNo +} + +func (self Min) String() string { + return fmt.Sprintf("", self.Limit) +} diff --git a/vendor/github.com/gobwas/glob/match/min_test.go b/vendor/github.com/gobwas/glob/match/min_test.go new file mode 100644 index 00000000..ab854ae0 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/min_test.go @@ -0,0 +1,57 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestMinIndex(t *testing.T) { + for id, test := range []struct { + limit int + fixture string + index int + segments []int + }{ + { + 1, + "abc", + 0, + []int{1, 2, 3}, + }, + { + 3, + "abcd", + 0, + []int{3, 4}, + }, + } { + p := NewMin(test.limit) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexMin(b *testing.B) { + m := NewMin(10) + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexMinParallel(b *testing.B) { + m := NewMin(10) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/nothing.go b/vendor/github.com/gobwas/glob/match/nothing.go new file mode 100644 index 00000000..0d4ecd36 --- /dev/null +++ 
b/vendor/github.com/gobwas/glob/match/nothing.go @@ -0,0 +1,27 @@ +package match + +import ( + "fmt" +) + +type Nothing struct{} + +func NewNothing() Nothing { + return Nothing{} +} + +func (self Nothing) Match(s string) bool { + return len(s) == 0 +} + +func (self Nothing) Index(s string) (int, []int) { + return 0, segments0 +} + +func (self Nothing) Len() int { + return lenZero +} + +func (self Nothing) String() string { + return fmt.Sprintf("") +} diff --git a/vendor/github.com/gobwas/glob/match/nothing_test.go b/vendor/github.com/gobwas/glob/match/nothing_test.go new file mode 100644 index 00000000..941c22d1 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/nothing_test.go @@ -0,0 +1,54 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestNothingIndex(t *testing.T) { + for id, test := range []struct { + fixture string + index int + segments []int + }{ + { + "abc", + 0, + []int{0}, + }, + { + "", + 0, + []int{0}, + }, + } { + p := NewNothing() + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexNothing(b *testing.B) { + m := NewNothing() + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexNothingParallel(b *testing.B) { + m := NewNothing() + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/prefix.go b/vendor/github.com/gobwas/glob/match/prefix.go new file mode 100644 index 00000000..a7347250 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix.go @@ -0,0 +1,50 @@ +package match + +import ( + "fmt" + "strings" + "unicode/utf8" +) + +type Prefix struct { + Prefix string +} + +func NewPrefix(p string) Prefix { + return Prefix{p} +} + +func (self Prefix) Index(s string) (int, []int) { + idx := strings.Index(s, self.Prefix) + if idx == -1 { + return -1, nil + } + + length := len(self.Prefix) + var sub string + if len(s) > idx+length { + sub = s[idx+length:] + } else { + sub = "" + } + + segments := acquireSegments(len(sub) + 1) + segments = append(segments, length) + for i, r := range sub { + segments = append(segments, length+i+utf8.RuneLen(r)) + } + + return idx, segments +} + +func (self Prefix) Len() int { + return lenNo +} + +func (self Prefix) Match(s string) bool { + return strings.HasPrefix(s, self.Prefix) +} + +func (self Prefix) String() string { + return fmt.Sprintf("", self.Prefix) +} diff --git a/vendor/github.com/gobwas/glob/match/prefix_any.go b/vendor/github.com/gobwas/glob/match/prefix_any.go new file mode 100644 index 00000000..8ee58fe1 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix_any.go @@ -0,0 +1,55 @@ +package match + +import ( + "fmt" + "strings" + "unicode/utf8" + + sutil "github.com/gobwas/glob/util/strings" +) + +type PrefixAny struct { + Prefix string + Separators []rune +} + +func NewPrefixAny(s string, sep []rune) PrefixAny { + return PrefixAny{s, sep} +} + +func (self PrefixAny) Index(s string) (int, []int) { + idx := strings.Index(s, self.Prefix) + if idx == -1 { + return -1, nil + } + + n := len(self.Prefix) + sub := s[idx+n:] + i := sutil.IndexAnyRunes(sub, self.Separators) + if i > -1 { + sub = sub[:i] + } + + seg := acquireSegments(len(sub) + 1) + seg = append(seg, 
n) + for i, r := range sub { + seg = append(seg, n+i+utf8.RuneLen(r)) + } + + return idx, seg +} + +func (self PrefixAny) Len() int { + return lenNo +} + +func (self PrefixAny) Match(s string) bool { + if !strings.HasPrefix(s, self.Prefix) { + return false + } + return sutil.IndexAnyRunes(s[len(self.Prefix):], self.Separators) == -1 +} + +func (self PrefixAny) String() string { + return fmt.Sprintf("", self.Prefix, string(self.Separators)) +} diff --git a/vendor/github.com/gobwas/glob/match/prefix_any_test.go b/vendor/github.com/gobwas/glob/match/prefix_any_test.go new file mode 100644 index 00000000..e6990e37 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix_any_test.go @@ -0,0 +1,47 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestPrefixAnyIndex(t *testing.T) { + for id, test := range []struct { + prefix string + separators []rune + fixture string + index int + segments []int + }{ + { + "ab", + []rune{'.'}, + "ab", + 0, + []int{2}, + }, + { + "ab", + []rune{'.'}, + "abc", + 0, + []int{2, 3}, + }, + { + "ab", + []rune{'.'}, + "qw.abcd.efg", + 3, + []int{2, 3, 4}, + }, + } { + p := NewPrefixAny(test.prefix, test.separators) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} diff --git a/vendor/github.com/gobwas/glob/match/prefix_suffix.go b/vendor/github.com/gobwas/glob/match/prefix_suffix.go new file mode 100644 index 00000000..8208085a --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix_suffix.go @@ -0,0 +1,62 @@ +package match + +import ( + "fmt" + "strings" +) + +type PrefixSuffix struct { + Prefix, Suffix string +} + +func NewPrefixSuffix(p, s string) PrefixSuffix { + return PrefixSuffix{p, s} +} + +func (self PrefixSuffix) Index(s string) (int, []int) { + prefixIdx := strings.Index(s, self.Prefix) + if prefixIdx == -1 { + return -1, nil + } + + suffixLen := len(self.Suffix) + if suffixLen <= 0 { + return prefixIdx, []int{len(s) - prefixIdx} + } + + if (len(s) - prefixIdx) <= 0 { + return -1, nil + } + + segments := acquireSegments(len(s) - prefixIdx) + for sub := s[prefixIdx:]; ; { + suffixIdx := strings.LastIndex(sub, self.Suffix) + if suffixIdx == -1 { + break + } + + segments = append(segments, suffixIdx+suffixLen) + sub = sub[:suffixIdx] + } + + if len(segments) == 0 { + releaseSegments(segments) + return -1, nil + } + + reverseSegments(segments) + + return prefixIdx, segments +} + +func (self PrefixSuffix) Len() int { + return lenNo +} + +func (self PrefixSuffix) Match(s string) bool { + return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix) +} + +func (self PrefixSuffix) String() string { + return fmt.Sprintf("", self.Prefix, self.Suffix) +} diff --git a/vendor/github.com/gobwas/glob/match/prefix_suffix_test.go b/vendor/github.com/gobwas/glob/match/prefix_suffix_test.go new file mode 100644 index 00000000..79b17b20 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix_suffix_test.go @@ -0,0 +1,67 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestPrefixSuffixIndex(t *testing.T) { + for id, test := range []struct { + prefix string + suffix string + fixture string + index int + segments []int + }{ + { + "a", + "c", + "abc", + 0, + []int{3}, + }, + { + "f", + "f", + "fffabfff", + 0, + []int{1, 2, 3, 6, 7, 8}, + }, + { + "ab", + "bc", + "abc", + 0, 
+ []int{3}, + }, + } { + p := NewPrefixSuffix(test.prefix, test.suffix) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexPrefixSuffix(b *testing.B) { + m := NewPrefixSuffix("qew", "sqw") + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexPrefixSuffixParallel(b *testing.B) { + m := NewPrefixSuffix("qew", "sqw") + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/prefix_test.go b/vendor/github.com/gobwas/glob/match/prefix_test.go new file mode 100644 index 00000000..22a296e6 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix_test.go @@ -0,0 +1,57 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestPrefixIndex(t *testing.T) { + for id, test := range []struct { + prefix string + fixture string + index int + segments []int + }{ + { + "ab", + "abc", + 0, + []int{2, 3}, + }, + { + "ab", + "fffabfff", + 3, + []int{2, 3, 4, 5}, + }, + } { + p := NewPrefix(test.prefix) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexPrefix(b *testing.B) { + m := NewPrefix("qew") + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexPrefixParallel(b *testing.B) { + m := NewPrefix("qew") + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/range.go b/vendor/github.com/gobwas/glob/match/range.go new file mode 100644 index 00000000..ce30245a --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/range.go @@ -0,0 +1,48 @@ +package match + +import ( + "fmt" + "unicode/utf8" +) + +type Range struct { + Lo, Hi rune + Not bool +} + +func NewRange(lo, hi rune, not bool) Range { + return Range{lo, hi, not} +} + +func (self Range) Len() int { + return lenOne +} + +func (self Range) Match(s string) bool { + r, w := utf8.DecodeRuneInString(s) + if len(s) > w { + return false + } + + inRange := r >= self.Lo && r <= self.Hi + + return inRange == !self.Not +} + +func (self Range) Index(s string) (int, []int) { + for i, r := range s { + if self.Not != (r >= self.Lo && r <= self.Hi) { + return i, segmentsByRuneLength[utf8.RuneLen(r)] + } + } + + return -1, nil +} + +func (self Range) String() string { + var not string + if self.Not { + not = "!" 
+ } + return fmt.Sprintf("", not, string(self.Lo), string(self.Hi)) +} diff --git a/vendor/github.com/gobwas/glob/match/range_test.go b/vendor/github.com/gobwas/glob/match/range_test.go new file mode 100644 index 00000000..0dddcfdb --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/range_test.go @@ -0,0 +1,67 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestRangeIndex(t *testing.T) { + for id, test := range []struct { + lo, hi rune + not bool + fixture string + index int + segments []int + }{ + { + 'a', 'z', + false, + "abc", + 0, + []int{1}, + }, + { + 'a', 'c', + false, + "abcd", + 0, + []int{1}, + }, + { + 'a', 'c', + true, + "abcd", + 3, + []int{1}, + }, + } { + m := NewRange(test.lo, test.hi, test.not) + index, segments := m.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexRange(b *testing.B) { + m := NewRange('0', '9', false) + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexRangeParallel(b *testing.B) { + m := NewRange('0', '9', false) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/row.go b/vendor/github.com/gobwas/glob/match/row.go new file mode 100644 index 00000000..4379042e --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/row.go @@ -0,0 +1,77 @@ +package match + +import ( + "fmt" +) + +type Row struct { + Matchers Matchers + RunesLength int + Segments []int +} + +func NewRow(len int, m ...Matcher) Row { + return Row{ + Matchers: Matchers(m), + RunesLength: len, + Segments: []int{len}, + } +} + +func (self Row) matchAll(s string) bool { + var idx int + for _, m := range self.Matchers { + length := m.Len() + + var next, i int + for next = range s[idx:] { + i++ + if i == length { + break + } + } + + if i < length || !m.Match(s[idx:idx+next+1]) { + return false + } + + idx += next + 1 + } + + return true +} + +func (self Row) lenOk(s string) bool { + var i int + for range s { + i++ + if i > self.RunesLength { + return false + } + } + return self.RunesLength == i +} + +func (self Row) Match(s string) bool { + return self.lenOk(s) && self.matchAll(s) +} + +func (self Row) Len() (l int) { + return self.RunesLength +} + +func (self Row) Index(s string) (int, []int) { + for i := range s { + if len(s[i:]) < self.RunesLength { + break + } + if self.matchAll(s[i:]) { + return i, self.Segments + } + } + return -1, nil +} + +func (self Row) String() string { + return fmt.Sprintf("", self.RunesLength, self.Matchers) +} diff --git a/vendor/github.com/gobwas/glob/match/row_test.go b/vendor/github.com/gobwas/glob/match/row_test.go new file mode 100644 index 00000000..c9e65ef5 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/row_test.go @@ -0,0 +1,82 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestRowIndex(t *testing.T) { + for id, test := range []struct { + matchers Matchers + length int + fixture string + index int + segments []int + }{ + { + Matchers{ + NewText("abc"), + NewText("def"), + NewSingle(nil), + }, + 7, + "qweabcdefghij", + 3, + []int{7}, + }, + { + Matchers{ + NewText("abc"), + NewText("def"), + NewSingle(nil), + }, + 7, + "abcd", + -1, + nil, + }, + } { + p := NewRow(test.length, 
test.matchers...) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkRowIndex(b *testing.B) { + m := NewRow( + 7, + Matchers{ + NewText("abc"), + NewText("def"), + NewSingle(nil), + }..., + ) + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexRowParallel(b *testing.B) { + m := NewRow( + 7, + Matchers{ + NewText("abc"), + NewText("def"), + NewSingle(nil), + }..., + ) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/segments.go b/vendor/github.com/gobwas/glob/match/segments.go new file mode 100644 index 00000000..9ea6f309 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/segments.go @@ -0,0 +1,91 @@ +package match + +import ( + "sync" +) + +type SomePool interface { + Get() []int + Put([]int) +} + +var segmentsPools [1024]sync.Pool + +func toPowerOfTwo(v int) int { + v-- + v |= v >> 1 + v |= v >> 2 + v |= v >> 4 + v |= v >> 8 + v |= v >> 16 + v++ + + return v +} + +const ( + cacheFrom = 16 + cacheToAndHigher = 1024 + cacheFromIndex = 15 + cacheToAndHigherIndex = 1023 +) + +var ( + segments0 = []int{0} + segments1 = []int{1} + segments2 = []int{2} + segments3 = []int{3} + segments4 = []int{4} +) + +var segmentsByRuneLength [5][]int = [5][]int{ + 0: segments0, + 1: segments1, + 2: segments2, + 3: segments3, + 4: segments4, +} + +func init() { + for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 { + func(i int) { + segmentsPools[i-1] = sync.Pool{New: func() interface{} { + return make([]int, 0, i) + }} + }(i) + } +} + +func getTableIndex(c int) int { + p := toPowerOfTwo(c) + switch { + case p >= cacheToAndHigher: + return cacheToAndHigherIndex + case p <= cacheFrom: + return cacheFromIndex + default: + return p - 1 + } +} + +func acquireSegments(c int) []int { + // make []int with less capacity than cacheFrom + // is faster than acquiring it from pool + if c < cacheFrom { + return make([]int, 0, c) + } + + return segmentsPools[getTableIndex(c)].Get().([]int)[:0] +} + +func releaseSegments(s []int) { + c := cap(s) + + // make []int with less capacity than cacheFrom + // is faster than acquiring it from pool + if c < cacheFrom { + return + } + + segmentsPools[getTableIndex(c)].Put(s) +} diff --git a/vendor/github.com/gobwas/glob/match/segments_test.go b/vendor/github.com/gobwas/glob/match/segments_test.go new file mode 100644 index 00000000..1ce1123d --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/segments_test.go @@ -0,0 +1,83 @@ +package match + +import ( + "sync" + "testing" +) + +func benchPool(i int, b *testing.B) { + pool := sync.Pool{New: func() interface{} { + return make([]int, 0, i) + }} + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + s := pool.Get().([]int)[:0] + pool.Put(s) + } + }) +} + +func benchMake(i int, b *testing.B) { + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _ = make([]int, 0, i) + } + }) +} + +func BenchmarkSegmentsPool_1(b *testing.B) { + benchPool(1, b) +} +func BenchmarkSegmentsPool_2(b *testing.B) { + benchPool(2, b) +} +func BenchmarkSegmentsPool_4(b *testing.B) { + benchPool(4, b) +} +func BenchmarkSegmentsPool_8(b *testing.B) { + benchPool(8, b) +} +func BenchmarkSegmentsPool_16(b 
*testing.B) { + benchPool(16, b) +} +func BenchmarkSegmentsPool_32(b *testing.B) { + benchPool(32, b) +} +func BenchmarkSegmentsPool_64(b *testing.B) { + benchPool(64, b) +} +func BenchmarkSegmentsPool_128(b *testing.B) { + benchPool(128, b) +} +func BenchmarkSegmentsPool_256(b *testing.B) { + benchPool(256, b) +} + +func BenchmarkSegmentsMake_1(b *testing.B) { + benchMake(1, b) +} +func BenchmarkSegmentsMake_2(b *testing.B) { + benchMake(2, b) +} +func BenchmarkSegmentsMake_4(b *testing.B) { + benchMake(4, b) +} +func BenchmarkSegmentsMake_8(b *testing.B) { + benchMake(8, b) +} +func BenchmarkSegmentsMake_16(b *testing.B) { + benchMake(16, b) +} +func BenchmarkSegmentsMake_32(b *testing.B) { + benchMake(32, b) +} +func BenchmarkSegmentsMake_64(b *testing.B) { + benchMake(64, b) +} +func BenchmarkSegmentsMake_128(b *testing.B) { + benchMake(128, b) +} +func BenchmarkSegmentsMake_256(b *testing.B) { + benchMake(256, b) +} diff --git a/vendor/github.com/gobwas/glob/match/single.go b/vendor/github.com/gobwas/glob/match/single.go new file mode 100644 index 00000000..ee6e3954 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/single.go @@ -0,0 +1,43 @@ +package match + +import ( + "fmt" + "github.com/gobwas/glob/util/runes" + "unicode/utf8" +) + +// single represents ? +type Single struct { + Separators []rune +} + +func NewSingle(s []rune) Single { + return Single{s} +} + +func (self Single) Match(s string) bool { + r, w := utf8.DecodeRuneInString(s) + if len(s) > w { + return false + } + + return runes.IndexRune(self.Separators, r) == -1 +} + +func (self Single) Len() int { + return lenOne +} + +func (self Single) Index(s string) (int, []int) { + for i, r := range s { + if runes.IndexRune(self.Separators, r) == -1 { + return i, segmentsByRuneLength[utf8.RuneLen(r)] + } + } + + return -1, nil +} + +func (self Single) String() string { + return fmt.Sprintf("", string(self.Separators)) +} diff --git a/vendor/github.com/gobwas/glob/match/single_test.go b/vendor/github.com/gobwas/glob/match/single_test.go new file mode 100644 index 00000000..a62d7204 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/single_test.go @@ -0,0 +1,57 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestSingleIndex(t *testing.T) { + for id, test := range []struct { + separators []rune + fixture string + index int + segments []int + }{ + { + []rune{'.'}, + ".abc", + 1, + []int{1}, + }, + { + []rune{'.'}, + ".", + -1, + nil, + }, + } { + p := NewSingle(test.separators) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexSingle(b *testing.B) { + m := NewSingle(bench_separators) + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexSingleParallel(b *testing.B) { + m := NewSingle(bench_separators) + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/suffix.go b/vendor/github.com/gobwas/glob/match/suffix.go new file mode 100644 index 00000000..85bea8c6 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/suffix.go @@ -0,0 +1,35 @@ +package match + +import ( + "fmt" + "strings" +) + +type Suffix struct { + Suffix string +} + +func NewSuffix(s string) Suffix { 
+ return Suffix{s} +} + +func (self Suffix) Len() int { + return lenNo +} + +func (self Suffix) Match(s string) bool { + return strings.HasSuffix(s, self.Suffix) +} + +func (self Suffix) Index(s string) (int, []int) { + idx := strings.Index(s, self.Suffix) + if idx == -1 { + return -1, nil + } + + return 0, []int{idx + len(self.Suffix)} +} + +func (self Suffix) String() string { + return fmt.Sprintf("", self.Suffix) +} diff --git a/vendor/github.com/gobwas/glob/match/suffix_any.go b/vendor/github.com/gobwas/glob/match/suffix_any.go new file mode 100644 index 00000000..c5106f81 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/suffix_any.go @@ -0,0 +1,43 @@ +package match + +import ( + "fmt" + "strings" + + sutil "github.com/gobwas/glob/util/strings" +) + +type SuffixAny struct { + Suffix string + Separators []rune +} + +func NewSuffixAny(s string, sep []rune) SuffixAny { + return SuffixAny{s, sep} +} + +func (self SuffixAny) Index(s string) (int, []int) { + idx := strings.Index(s, self.Suffix) + if idx == -1 { + return -1, nil + } + + i := sutil.LastIndexAnyRunes(s[:idx], self.Separators) + 1 + + return i, []int{idx + len(self.Suffix) - i} +} + +func (self SuffixAny) Len() int { + return lenNo +} + +func (self SuffixAny) Match(s string) bool { + if !strings.HasSuffix(s, self.Suffix) { + return false + } + return sutil.IndexAnyRunes(s[:len(s)-len(self.Suffix)], self.Separators) == -1 +} + +func (self SuffixAny) String() string { + return fmt.Sprintf("", string(self.Separators), self.Suffix) +} diff --git a/vendor/github.com/gobwas/glob/match/suffix_any_test.go b/vendor/github.com/gobwas/glob/match/suffix_any_test.go new file mode 100644 index 00000000..eed6e596 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/suffix_any_test.go @@ -0,0 +1,47 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestSuffixAnyIndex(t *testing.T) { + for id, test := range []struct { + suffix string + separators []rune + fixture string + index int + segments []int + }{ + { + "ab", + []rune{'.'}, + "ab", + 0, + []int{2}, + }, + { + "ab", + []rune{'.'}, + "cab", + 0, + []int{3}, + }, + { + "ab", + []rune{'.'}, + "qw.cdab.efg", + 3, + []int{4}, + }, + } { + p := NewSuffixAny(test.suffix, test.separators) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} diff --git a/vendor/github.com/gobwas/glob/match/suffix_test.go b/vendor/github.com/gobwas/glob/match/suffix_test.go new file mode 100644 index 00000000..49047634 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/suffix_test.go @@ -0,0 +1,57 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestSuffixIndex(t *testing.T) { + for id, test := range []struct { + prefix string + fixture string + index int + segments []int + }{ + { + "ab", + "abc", + 0, + []int{2}, + }, + { + "ab", + "fffabfff", + 0, + []int{5}, + }, + } { + p := NewSuffix(test.prefix) + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexSuffix(b *testing.B) { + m := NewSuffix("qwe") + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + 
releaseSegments(s) + } +} + +func BenchmarkIndexSuffixParallel(b *testing.B) { + m := NewSuffix("qwe") + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/super.go b/vendor/github.com/gobwas/glob/match/super.go new file mode 100644 index 00000000..3875950b --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/super.go @@ -0,0 +1,33 @@ +package match + +import ( + "fmt" +) + +type Super struct{} + +func NewSuper() Super { + return Super{} +} + +func (self Super) Match(s string) bool { + return true +} + +func (self Super) Len() int { + return lenNo +} + +func (self Super) Index(s string) (int, []int) { + segments := acquireSegments(len(s) + 1) + for i := range s { + segments = append(segments, i) + } + segments = append(segments, len(s)) + + return 0, segments +} + +func (self Super) String() string { + return fmt.Sprintf("") +} diff --git a/vendor/github.com/gobwas/glob/match/super_test.go b/vendor/github.com/gobwas/glob/match/super_test.go new file mode 100644 index 00000000..10418dc2 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/super_test.go @@ -0,0 +1,54 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestSuperIndex(t *testing.T) { + for id, test := range []struct { + fixture string + index int + segments []int + }{ + { + "abc", + 0, + []int{0, 1, 2, 3}, + }, + { + "", + 0, + []int{0}, + }, + } { + p := NewSuper() + index, segments := p.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexSuper(b *testing.B) { + m := NewSuper() + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexSuperParallel(b *testing.B) { + m := NewSuper() + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/match/text.go b/vendor/github.com/gobwas/glob/match/text.go new file mode 100644 index 00000000..0a17616d --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/text.go @@ -0,0 +1,45 @@ +package match + +import ( + "fmt" + "strings" + "unicode/utf8" +) + +// raw represents raw string to match +type Text struct { + Str string + RunesLength int + BytesLength int + Segments []int +} + +func NewText(s string) Text { + return Text{ + Str: s, + RunesLength: utf8.RuneCountInString(s), + BytesLength: len(s), + Segments: []int{len(s)}, + } +} + +func (self Text) Match(s string) bool { + return self.Str == s +} + +func (self Text) Len() int { + return self.RunesLength +} + +func (self Text) Index(s string) (int, []int) { + index := strings.Index(s, self.Str) + if index == -1 { + return -1, nil + } + + return index, self.Segments +} + +func (self Text) String() string { + return fmt.Sprintf("", self.Str) +} diff --git a/vendor/github.com/gobwas/glob/match/text_test.go b/vendor/github.com/gobwas/glob/match/text_test.go new file mode 100644 index 00000000..a3de40ea --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/text_test.go @@ -0,0 +1,57 @@ +package match + +import ( + "reflect" + "testing" +) + +func TestTextIndex(t *testing.T) { + for id, test := range []struct { + text string + fixture string + index int + segments []int + }{ + { + "b", + "abc", + 1, + 
[]int{1}, + }, + { + "f", + "abcd", + -1, + nil, + }, + } { + m := NewText(test.text) + index, segments := m.Index(test.fixture) + if index != test.index { + t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index) + } + if !reflect.DeepEqual(segments, test.segments) { + t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments) + } + } +} + +func BenchmarkIndexText(b *testing.B) { + m := NewText("foo") + + for i := 0; i < b.N; i++ { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } +} + +func BenchmarkIndexTextParallel(b *testing.B) { + m := NewText("foo") + + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + _, s := m.Index(bench_pattern) + releaseSegments(s) + } + }) +} diff --git a/vendor/github.com/gobwas/glob/readme.md b/vendor/github.com/gobwas/glob/readme.md new file mode 100644 index 00000000..f58144e7 --- /dev/null +++ b/vendor/github.com/gobwas/glob/readme.md @@ -0,0 +1,148 @@ +# glob.[go](https://golang.org) + +[![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url] + +> Go Globbing Library. + +## Install + +```shell + go get github.com/gobwas/glob +``` + +## Example + +```go + +package main + +import "github.com/gobwas/glob" + +func main() { + var g glob.Glob + + // create simple glob + g = glob.MustCompile("*.github.com") + g.Match("api.github.com") // true + + // quote meta characters and then create simple glob + g = glob.MustCompile(glob.QuoteMeta("*.github.com")) + g.Match("*.github.com") // true + + // create new glob with set of delimiters as ["."] + g = glob.MustCompile("api.*.com", '.') + g.Match("api.github.com") // true + g.Match("api.gi.hub.com") // false + + // create new glob with set of delimiters as ["."] + // but now with super wildcard + g = glob.MustCompile("api.**.com", '.') + g.Match("api.github.com") // true + g.Match("api.gi.hub.com") // true + + // create glob with single symbol wildcard + g = glob.MustCompile("?at") + g.Match("cat") // true + g.Match("fat") // true + g.Match("at") // false + + // create glob with single symbol wildcard and delimiters ['f'] + g = glob.MustCompile("?at", 'f') + g.Match("cat") // true + g.Match("fat") // false + g.Match("at") // false + + // create glob with character-list matchers + g = glob.MustCompile("[abc]at") + g.Match("cat") // true + g.Match("bat") // true + g.Match("fat") // false + g.Match("at") // false + + // create glob with character-list matchers + g = glob.MustCompile("[!abc]at") + g.Match("cat") // false + g.Match("bat") // false + g.Match("fat") // true + g.Match("at") // false + + // create glob with character-range matchers + g = glob.MustCompile("[a-c]at") + g.Match("cat") // true + g.Match("bat") // true + g.Match("fat") // false + g.Match("at") // false + + // create glob with character-range matchers + g = glob.MustCompile("[!a-c]at") + g.Match("cat") // false + g.Match("bat") // false + g.Match("fat") // true + g.Match("at") // false + + // create glob with pattern-alternatives list + g = glob.MustCompile("{cat,bat,[fr]at}") + g.Match("cat") // true + g.Match("bat") // true + g.Match("fat") // true + g.Match("rat") // true + g.Match("at") // false + g.Match("zat") // false +} + +``` + +## Performance + +This library is created for compile-once patterns. This means, that compilation could take time, but +strings matching is done faster, than in case when always parsing template. 
+ +If you will not use compiled `glob.Glob` object, and do `g := glob.MustCompile(pattern); g.Match(...)` every time, then your code will be much more slower. + +Run `go test -bench=.` from source root to see the benchmarks: + +Pattern | Fixture | Match | Speed (ns/op) +--------|---------|-------|-------------- +`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432 +`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199 +`https://*.google.*` | `https://account.google.com` | `true` | 96 +`https://*.google.*` | `https://google.com` | `false` | 66 +`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163 +`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197 +`{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22 +`{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24 +`abc*` | `abcdef` | `true` | 8.15 +`abc*` | `af` | `false` | 5.68 +`*def` | `abcdef` | `true` | 8.84 +`*def` | `af` | `false` | 5.74 +`ab*ef` | `abcdef` | `true` | 15.2 +`ab*ef` | `af` | `false` | 10.4 + +The same things with `regexp` package: + +Pattern | Fixture | Match | Speed (ns/op) +--------|---------|-------|-------------- +`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553 +`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383 +`^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205 +`^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767 +`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435 +`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://google.com` | `false` | 1674 +`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039 +`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272 +`^abc.*$` | `abcdef` | `true` | 237 +`^abc.*$` | `af` | `false` | 100 +`^.*def$` | `abcdef` | `true` | 464 +`^.*def$` | `af` | `false` | 265 +`^ab.*ef$` | `abcdef` | `true` | 375 +`^ab.*ef$` | `af` | `false` | 145 + +[godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg +[godoc-url]: https://godoc.org/github.com/gobwas/glob +[travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master +[travis-url]: https://travis-ci.org/gobwas/glob + +## Syntax + +Syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm), +except that `**` is aka super-asterisk, that do not sensitive for separators. 
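The readme's Performance note above boils down to: build the `glob.Glob` once and reuse it. A minimal sketch of that pattern, using only the `MustCompile`/`Match` API documented in the readme (the `*.example.com` pattern and hostnames are illustrative, not part of this change):

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// Compile once and reuse the matcher; recompiling inside a loop
	// discards the work the compiler front-loads.
	g := glob.MustCompile("*.example.com", '.')

	for _, h := range []string{"www.example.com", "a.b.example.com", "example.com"} {
		// With '.' registered as a separator, "*" cannot cross label
		// boundaries, so only the single-label subdomain matches.
		fmt.Println(h, g.Match(h)) // true, false, false
	}
}
```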
\ No newline at end of file diff --git a/vendor/github.com/gobwas/glob/syntax/ast/ast.go b/vendor/github.com/gobwas/glob/syntax/ast/ast.go new file mode 100644 index 00000000..3220a694 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/ast/ast.go @@ -0,0 +1,122 @@ +package ast + +import ( + "bytes" + "fmt" +) + +type Node struct { + Parent *Node + Children []*Node + Value interface{} + Kind Kind +} + +func NewNode(k Kind, v interface{}, ch ...*Node) *Node { + n := &Node{ + Kind: k, + Value: v, + } + for _, c := range ch { + Insert(n, c) + } + return n +} + +func (a *Node) Equal(b *Node) bool { + if a.Kind != b.Kind { + return false + } + if a.Value != b.Value { + return false + } + if len(a.Children) != len(b.Children) { + return false + } + for i, c := range a.Children { + if !c.Equal(b.Children[i]) { + return false + } + } + return true +} + +func (a *Node) String() string { + var buf bytes.Buffer + buf.WriteString(a.Kind.String()) + if a.Value != nil { + buf.WriteString(" =") + buf.WriteString(fmt.Sprintf("%v", a.Value)) + } + if len(a.Children) > 0 { + buf.WriteString(" [") + for i, c := range a.Children { + if i > 0 { + buf.WriteString(", ") + } + buf.WriteString(c.String()) + } + buf.WriteString("]") + } + return buf.String() +} + +func Insert(parent *Node, children ...*Node) { + parent.Children = append(parent.Children, children...) + for _, ch := range children { + ch.Parent = parent + } +} + +type List struct { + Not bool + Chars string +} + +type Range struct { + Not bool + Lo, Hi rune +} + +type Text struct { + Text string +} + +type Kind int + +const ( + KindNothing Kind = iota + KindPattern + KindList + KindRange + KindText + KindAny + KindSuper + KindSingle + KindAnyOf +) + +func (k Kind) String() string { + switch k { + case KindNothing: + return "Nothing" + case KindPattern: + return "Pattern" + case KindList: + return "List" + case KindRange: + return "Range" + case KindText: + return "Text" + case KindAny: + return "Any" + case KindSuper: + return "Super" + case KindSingle: + return "Single" + case KindAnyOf: + return "AnyOf" + default: + return "" + } +} diff --git a/vendor/github.com/gobwas/glob/syntax/ast/parser.go b/vendor/github.com/gobwas/glob/syntax/ast/parser.go new file mode 100644 index 00000000..429b4094 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/ast/parser.go @@ -0,0 +1,157 @@ +package ast + +import ( + "errors" + "fmt" + "github.com/gobwas/glob/syntax/lexer" + "unicode/utf8" +) + +type Lexer interface { + Next() lexer.Token +} + +type parseFn func(*Node, Lexer) (parseFn, *Node, error) + +func Parse(lexer Lexer) (*Node, error) { + var parser parseFn + + root := NewNode(KindPattern, nil) + + var ( + tree *Node + err error + ) + for parser, tree = parserMain, root; parser != nil; { + parser, tree, err = parser(tree, lexer) + if err != nil { + return nil, err + } + } + + return root, nil +} + +func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) { + for { + token := lex.Next() + switch token.Type { + case lexer.EOF: + return nil, tree, nil + + case lexer.Error: + return nil, tree, errors.New(token.Raw) + + case lexer.Text: + Insert(tree, NewNode(KindText, Text{token.Raw})) + return parserMain, tree, nil + + case lexer.Any: + Insert(tree, NewNode(KindAny, nil)) + return parserMain, tree, nil + + case lexer.Super: + Insert(tree, NewNode(KindSuper, nil)) + return parserMain, tree, nil + + case lexer.Single: + Insert(tree, NewNode(KindSingle, nil)) + return parserMain, tree, nil + + case lexer.RangeOpen: + return parserRange, tree, nil + 
+ case lexer.TermsOpen: + a := NewNode(KindAnyOf, nil) + Insert(tree, a) + + p := NewNode(KindPattern, nil) + Insert(a, p) + + return parserMain, p, nil + + case lexer.Separator: + p := NewNode(KindPattern, nil) + Insert(tree.Parent, p) + + return parserMain, p, nil + + case lexer.TermsClose: + return parserMain, tree.Parent.Parent, nil + + default: + return nil, tree, fmt.Errorf("unexpected token: %s", token) + } + } + return nil, tree, fmt.Errorf("unknown error") +} + +func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) { + var ( + not bool + lo rune + hi rune + chars string + ) + for { + token := lex.Next() + switch token.Type { + case lexer.EOF: + return nil, tree, errors.New("unexpected end") + + case lexer.Error: + return nil, tree, errors.New(token.Raw) + + case lexer.Not: + not = true + + case lexer.RangeLo: + r, w := utf8.DecodeRuneInString(token.Raw) + if len(token.Raw) > w { + return nil, tree, fmt.Errorf("unexpected length of lo character") + } + lo = r + + case lexer.RangeBetween: + // + + case lexer.RangeHi: + r, w := utf8.DecodeRuneInString(token.Raw) + if len(token.Raw) > w { + return nil, tree, fmt.Errorf("unexpected length of lo character") + } + + hi = r + + if hi < lo { + return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo)) + } + + case lexer.Text: + chars = token.Raw + + case lexer.RangeClose: + isRange := lo != 0 && hi != 0 + isChars := chars != "" + + if isChars == isRange { + return nil, tree, fmt.Errorf("could not parse range") + } + + if isRange { + Insert(tree, NewNode(KindRange, Range{ + Lo: lo, + Hi: hi, + Not: not, + })) + } else { + Insert(tree, NewNode(KindList, List{ + Chars: chars, + Not: not, + })) + } + + return parserMain, tree, nil + } + } +} diff --git a/vendor/github.com/gobwas/glob/syntax/ast/parser_test.go b/vendor/github.com/gobwas/glob/syntax/ast/parser_test.go new file mode 100644 index 00000000..a469d38c --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/ast/parser_test.go @@ -0,0 +1,218 @@ +package ast + +import ( + "reflect" + "testing" + + "github.com/gobwas/glob/syntax/lexer" +) + +type stubLexer struct { + tokens []lexer.Token + pos int +} + +func (s *stubLexer) Next() (ret lexer.Token) { + if s.pos == len(s.tokens) { + return lexer.Token{lexer.EOF, ""} + } + ret = s.tokens[s.pos] + s.pos++ + return +} + +func TestParseString(t *testing.T) { + for id, test := range []struct { + tokens []lexer.Token + tree *Node + }{ + { + //pattern: "abc", + tokens: []lexer.Token{ + {lexer.Text, "abc"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "abc"}), + ), + }, + { + //pattern: "a*c", + tokens: []lexer.Token{ + {lexer.Text, "a"}, + {lexer.Any, "*"}, + {lexer.Text, "c"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "a"}), + NewNode(KindAny, nil), + NewNode(KindText, Text{Text: "c"}), + ), + }, + { + //pattern: "a**c", + tokens: []lexer.Token{ + {lexer.Text, "a"}, + {lexer.Super, "**"}, + {lexer.Text, "c"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "a"}), + NewNode(KindSuper, nil), + NewNode(KindText, Text{Text: "c"}), + ), + }, + { + //pattern: "a?c", + tokens: []lexer.Token{ + {lexer.Text, "a"}, + {lexer.Single, "?"}, + {lexer.Text, "c"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "a"}), + NewNode(KindSingle, nil), + NewNode(KindText, Text{Text: "c"}), + ), + }, + { + //pattern: "[!a-z]", + tokens: 
[]lexer.Token{ + {lexer.RangeOpen, "["}, + {lexer.Not, "!"}, + {lexer.RangeLo, "a"}, + {lexer.RangeBetween, "-"}, + {lexer.RangeHi, "z"}, + {lexer.RangeClose, "]"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindRange, Range{Lo: 'a', Hi: 'z', Not: true}), + ), + }, + { + //pattern: "[az]", + tokens: []lexer.Token{ + {lexer.RangeOpen, "["}, + {lexer.Text, "az"}, + {lexer.RangeClose, "]"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindList, List{Chars: "az"}), + ), + }, + { + //pattern: "{a,z}", + tokens: []lexer.Token{ + {lexer.TermsOpen, "{"}, + {lexer.Text, "a"}, + {lexer.Separator, ","}, + {lexer.Text, "z"}, + {lexer.TermsClose, "}"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindAnyOf, nil, + NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "a"}), + ), + NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "z"}), + ), + ), + ), + }, + { + //pattern: "/{z,ab}*", + tokens: []lexer.Token{ + {lexer.Text, "/"}, + {lexer.TermsOpen, "{"}, + {lexer.Text, "z"}, + {lexer.Separator, ","}, + {lexer.Text, "ab"}, + {lexer.TermsClose, "}"}, + {lexer.Any, "*"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "/"}), + NewNode(KindAnyOf, nil, + NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "z"}), + ), + NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "ab"}), + ), + ), + NewNode(KindAny, nil), + ), + }, + { + //pattern: "{a,{x,y},?,[a-z],[!qwe]}", + tokens: []lexer.Token{ + {lexer.TermsOpen, "{"}, + {lexer.Text, "a"}, + {lexer.Separator, ","}, + {lexer.TermsOpen, "{"}, + {lexer.Text, "x"}, + {lexer.Separator, ","}, + {lexer.Text, "y"}, + {lexer.TermsClose, "}"}, + {lexer.Separator, ","}, + {lexer.Single, "?"}, + {lexer.Separator, ","}, + {lexer.RangeOpen, "["}, + {lexer.RangeLo, "a"}, + {lexer.RangeBetween, "-"}, + {lexer.RangeHi, "z"}, + {lexer.RangeClose, "]"}, + {lexer.Separator, ","}, + {lexer.RangeOpen, "["}, + {lexer.Not, "!"}, + {lexer.Text, "qwe"}, + {lexer.RangeClose, "]"}, + {lexer.TermsClose, "}"}, + {lexer.EOF, ""}, + }, + tree: NewNode(KindPattern, nil, + NewNode(KindAnyOf, nil, + NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "a"}), + ), + NewNode(KindPattern, nil, + NewNode(KindAnyOf, nil, + NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "x"}), + ), + NewNode(KindPattern, nil, + NewNode(KindText, Text{Text: "y"}), + ), + ), + ), + NewNode(KindPattern, nil, + NewNode(KindSingle, nil), + ), + NewNode(KindPattern, nil, + NewNode(KindRange, Range{Lo: 'a', Hi: 'z', Not: false}), + ), + NewNode(KindPattern, nil, + NewNode(KindList, List{Chars: "qwe", Not: true}), + ), + ), + ), + }, + } { + lexer := &stubLexer{tokens: test.tokens} + result, err := Parse(lexer) + if err != nil { + t.Errorf("[%d] unexpected error: %s", id, err) + } + if !reflect.DeepEqual(test.tree, result) { + t.Errorf("[%d] Parse():\nact:\t%s\nexp:\t%s\n", id, result, test.tree) + } + } +} diff --git a/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go b/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go new file mode 100644 index 00000000..a1c8d196 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go @@ -0,0 +1,273 @@ +package lexer + +import ( + "bytes" + "fmt" + "github.com/gobwas/glob/util/runes" + "unicode/utf8" +) + +const ( + char_any = '*' + char_comma = ',' + char_single = '?' + char_escape = '\\' + char_range_open = '[' + char_range_close = ']' + char_terms_open = '{' + char_terms_close = '}' + char_range_not = '!' 
+ char_range_between = '-' +) + +var specials = []byte{ + char_any, + char_single, + char_escape, + char_range_open, + char_range_close, + char_terms_open, + char_terms_close, +} + +func Special(c byte) bool { + return bytes.IndexByte(specials, c) != -1 +} + +type tokens []Token + +func (i *tokens) shift() (ret Token) { + ret = (*i)[0] + copy(*i, (*i)[1:]) + *i = (*i)[:len(*i)-1] + return +} + +func (i *tokens) push(v Token) { + *i = append(*i, v) +} + +func (i *tokens) empty() bool { + return len(*i) == 0 +} + +var eof rune = 0 + +type lexer struct { + data string + pos int + err error + + tokens tokens + termsLevel int + + lastRune rune + lastRuneSize int + hasRune bool +} + +func NewLexer(source string) *lexer { + l := &lexer{ + data: source, + tokens: tokens(make([]Token, 0, 4)), + } + return l +} + +func (l *lexer) Next() Token { + if l.err != nil { + return Token{Error, l.err.Error()} + } + if !l.tokens.empty() { + return l.tokens.shift() + } + + l.fetchItem() + return l.Next() +} + +func (l *lexer) peek() (r rune, w int) { + if l.pos == len(l.data) { + return eof, 0 + } + + r, w = utf8.DecodeRuneInString(l.data[l.pos:]) + if r == utf8.RuneError { + l.errorf("could not read rune") + r = eof + w = 0 + } + + return +} + +func (l *lexer) read() rune { + if l.hasRune { + l.hasRune = false + l.seek(l.lastRuneSize) + return l.lastRune + } + + r, s := l.peek() + l.seek(s) + + l.lastRune = r + l.lastRuneSize = s + + return r +} + +func (l *lexer) seek(w int) { + l.pos += w +} + +func (l *lexer) unread() { + if l.hasRune { + l.errorf("could not unread rune") + return + } + l.seek(-l.lastRuneSize) + l.hasRune = true +} + +func (l *lexer) errorf(f string, v ...interface{}) { + l.err = fmt.Errorf(f, v...) +} + +func (l *lexer) inTerms() bool { + return l.termsLevel > 0 +} + +func (l *lexer) termsEnter() { + l.termsLevel++ +} + +func (l *lexer) termsLeave() { + l.termsLevel-- +} + +var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open} +var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma) + +func (l *lexer) fetchItem() { + r := l.read() + switch { + case r == eof: + l.tokens.push(Token{EOF, ""}) + + case r == char_terms_open: + l.termsEnter() + l.tokens.push(Token{TermsOpen, string(r)}) + + case r == char_comma && l.inTerms(): + l.tokens.push(Token{Separator, string(r)}) + + case r == char_terms_close && l.inTerms(): + l.tokens.push(Token{TermsClose, string(r)}) + l.termsLeave() + + case r == char_range_open: + l.tokens.push(Token{RangeOpen, string(r)}) + l.fetchRange() + + case r == char_single: + l.tokens.push(Token{Single, string(r)}) + + case r == char_any: + if l.read() == char_any { + l.tokens.push(Token{Super, string(r) + string(r)}) + } else { + l.unread() + l.tokens.push(Token{Any, string(r)}) + } + + default: + l.unread() + + var breakers []rune + if l.inTerms() { + breakers = inTermsBreakers + } else { + breakers = inTextBreakers + } + l.fetchText(breakers) + } +} + +func (l *lexer) fetchRange() { + var wantHi bool + var wantClose bool + var seenNot bool + for { + r := l.read() + if r == eof { + l.errorf("unexpected end of input") + return + } + + if wantClose { + if r != char_range_close { + l.errorf("expected close range character") + } else { + l.tokens.push(Token{RangeClose, string(r)}) + } + return + } + + if wantHi { + l.tokens.push(Token{RangeHi, string(r)}) + wantClose = true + continue + } + + if !seenNot && r == char_range_not { + l.tokens.push(Token{Not, string(r)}) + seenNot = true + continue + } + + if n, w := l.peek(); n 
== char_range_between { + l.seek(w) + l.tokens.push(Token{RangeLo, string(r)}) + l.tokens.push(Token{RangeBetween, string(n)}) + wantHi = true + continue + } + + l.unread() // unread first peek and fetch as text + l.fetchText([]rune{char_range_close}) + wantClose = true + } +} + +func (l *lexer) fetchText(breakers []rune) { + var data []rune + var escaped bool + +reading: + for { + r := l.read() + if r == eof { + break + } + + if !escaped { + if r == char_escape { + escaped = true + continue + } + + if runes.IndexRune(breakers, r) != -1 { + l.unread() + break reading + } + } + + escaped = false + data = append(data, r) + } + + if len(data) > 0 { + l.tokens.push(Token{Text, string(data)}) + } +} diff --git a/vendor/github.com/gobwas/glob/syntax/lexer/lexer_test.go b/vendor/github.com/gobwas/glob/syntax/lexer/lexer_test.go new file mode 100644 index 00000000..ec35f813 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/lexer/lexer_test.go @@ -0,0 +1,192 @@ +package lexer + +import ( + "testing" +) + +func TestLexGood(t *testing.T) { + for id, test := range []struct { + pattern string + items []Token + }{ + { + pattern: "", + items: []Token{ + {EOF, ""}, + }, + }, + { + pattern: "hello", + items: []Token{ + {Text, "hello"}, + {EOF, ""}, + }, + }, + { + pattern: "/{rate,[0-9]]}*", + items: []Token{ + {Text, "/"}, + {TermsOpen, "{"}, + {Text, "rate"}, + {Separator, ","}, + {RangeOpen, "["}, + {RangeLo, "0"}, + {RangeBetween, "-"}, + {RangeHi, "9"}, + {RangeClose, "]"}, + {Text, "]"}, + {TermsClose, "}"}, + {Any, "*"}, + {EOF, ""}, + }, + }, + { + pattern: "hello,world", + items: []Token{ + {Text, "hello,world"}, + {EOF, ""}, + }, + }, + { + pattern: "hello\\,world", + items: []Token{ + {Text, "hello,world"}, + {EOF, ""}, + }, + }, + { + pattern: "hello\\{world", + items: []Token{ + {Text, "hello{world"}, + {EOF, ""}, + }, + }, + { + pattern: "hello?", + items: []Token{ + {Text, "hello"}, + {Single, "?"}, + {EOF, ""}, + }, + }, + { + pattern: "hellof*", + items: []Token{ + {Text, "hellof"}, + {Any, "*"}, + {EOF, ""}, + }, + }, + { + pattern: "hello**", + items: []Token{ + {Text, "hello"}, + {Super, "**"}, + {EOF, ""}, + }, + }, + { + pattern: "[日-語]", + items: []Token{ + {RangeOpen, "["}, + {RangeLo, "日"}, + {RangeBetween, "-"}, + {RangeHi, "語"}, + {RangeClose, "]"}, + {EOF, ""}, + }, + }, + { + pattern: "[!日-語]", + items: []Token{ + {RangeOpen, "["}, + {Not, "!"}, + {RangeLo, "日"}, + {RangeBetween, "-"}, + {RangeHi, "語"}, + {RangeClose, "]"}, + {EOF, ""}, + }, + }, + { + pattern: "[日本語]", + items: []Token{ + {RangeOpen, "["}, + {Text, "日本語"}, + {RangeClose, "]"}, + {EOF, ""}, + }, + }, + { + pattern: "[!日本語]", + items: []Token{ + {RangeOpen, "["}, + {Not, "!"}, + {Text, "日本語"}, + {RangeClose, "]"}, + {EOF, ""}, + }, + }, + { + pattern: "{a,b}", + items: []Token{ + {TermsOpen, "{"}, + {Text, "a"}, + {Separator, ","}, + {Text, "b"}, + {TermsClose, "}"}, + {EOF, ""}, + }, + }, + { + pattern: "/{z,ab}*", + items: []Token{ + {Text, "/"}, + {TermsOpen, "{"}, + {Text, "z"}, + {Separator, ","}, + {Text, "ab"}, + {TermsClose, "}"}, + {Any, "*"}, + {EOF, ""}, + }, + }, + { + pattern: "{[!日-語],*,?,{a,b,\\c}}", + items: []Token{ + {TermsOpen, "{"}, + {RangeOpen, "["}, + {Not, "!"}, + {RangeLo, "日"}, + {RangeBetween, "-"}, + {RangeHi, "語"}, + {RangeClose, "]"}, + {Separator, ","}, + {Any, "*"}, + {Separator, ","}, + {Single, "?"}, + {Separator, ","}, + {TermsOpen, "{"}, + {Text, "a"}, + {Separator, ","}, + {Text, "b"}, + {Separator, ","}, + {Text, "c"}, + {TermsClose, "}"}, + {TermsClose, "}"}, + {EOF, 
""}, + }, + }, + } { + lexer := NewLexer(test.pattern) + for i, exp := range test.items { + act := lexer.Next() + if act.Type != exp.Type { + t.Errorf("#%d %q: wrong %d-th item type: exp: %q; act: %q\n\t(%s vs %s)", id, test.pattern, i, exp.Type, act.Type, exp, act) + } + if act.Raw != exp.Raw { + t.Errorf("#%d %q: wrong %d-th item contents: exp: %q; act: %q\n\t(%s vs %s)", id, test.pattern, i, exp.Raw, act.Raw, exp, act) + } + } + } +} diff --git a/vendor/github.com/gobwas/glob/syntax/lexer/token.go b/vendor/github.com/gobwas/glob/syntax/lexer/token.go new file mode 100644 index 00000000..2797c4e8 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/lexer/token.go @@ -0,0 +1,88 @@ +package lexer + +import "fmt" + +type TokenType int + +const ( + EOF TokenType = iota + Error + Text + Char + Any + Super + Single + Not + Separator + RangeOpen + RangeClose + RangeLo + RangeHi + RangeBetween + TermsOpen + TermsClose +) + +func (tt TokenType) String() string { + switch tt { + case EOF: + return "eof" + + case Error: + return "error" + + case Text: + return "text" + + case Char: + return "char" + + case Any: + return "any" + + case Super: + return "super" + + case Single: + return "single" + + case Not: + return "not" + + case Separator: + return "separator" + + case RangeOpen: + return "range_open" + + case RangeClose: + return "range_close" + + case RangeLo: + return "range_lo" + + case RangeHi: + return "range_hi" + + case RangeBetween: + return "range_between" + + case TermsOpen: + return "terms_open" + + case TermsClose: + return "terms_close" + + default: + return "undef" + } +} + +type Token struct { + Type TokenType + Raw string +} + +func (t Token) String() string { + return fmt.Sprintf("%v<%q>", t.Type, t.Raw) +} diff --git a/vendor/github.com/gobwas/glob/syntax/syntax.go b/vendor/github.com/gobwas/glob/syntax/syntax.go new file mode 100644 index 00000000..1d168b14 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/syntax.go @@ -0,0 +1,14 @@ +package syntax + +import ( + "github.com/gobwas/glob/syntax/ast" + "github.com/gobwas/glob/syntax/lexer" +) + +func Parse(s string) (*ast.Node, error) { + return ast.Parse(lexer.NewLexer(s)) +} + +func Special(b byte) bool { + return lexer.Special(b) +} diff --git a/vendor/github.com/gobwas/glob/util/runes/runes.go b/vendor/github.com/gobwas/glob/util/runes/runes.go new file mode 100644 index 00000000..a7235564 --- /dev/null +++ b/vendor/github.com/gobwas/glob/util/runes/runes.go @@ -0,0 +1,154 @@ +package runes + +func Index(s, needle []rune) int { + ls, ln := len(s), len(needle) + + switch { + case ln == 0: + return 0 + case ln == 1: + return IndexRune(s, needle[0]) + case ln == ls: + if Equal(s, needle) { + return 0 + } + return -1 + case ln > ls: + return -1 + } + +head: + for i := 0; i < ls && ls-i >= ln; i++ { + for y := 0; y < ln; y++ { + if s[i+y] != needle[y] { + continue head + } + } + + return i + } + + return -1 +} + +func LastIndex(s, needle []rune) int { + ls, ln := len(s), len(needle) + + switch { + case ln == 0: + if ls == 0 { + return 0 + } + return ls + case ln == 1: + return IndexLastRune(s, needle[0]) + case ln == ls: + if Equal(s, needle) { + return 0 + } + return -1 + case ln > ls: + return -1 + } + +head: + for i := ls - 1; i >= 0 && i >= ln; i-- { + for y := ln - 1; y >= 0; y-- { + if s[i-(ln-y-1)] != needle[y] { + continue head + } + } + + return i - ln + 1 + } + + return -1 +} + +// IndexAny returns the index of the first instance of any Unicode code point +// from chars in s, or -1 if no Unicode code point from 
chars is present in s. +func IndexAny(s, chars []rune) int { + if len(chars) > 0 { + for i, c := range s { + for _, m := range chars { + if c == m { + return i + } + } + } + } + return -1 +} + +func Contains(s, needle []rune) bool { + return Index(s, needle) >= 0 +} + +func Max(s []rune) (max rune) { + for _, r := range s { + if r > max { + max = r + } + } + + return +} + +func Min(s []rune) rune { + min := rune(-1) + for _, r := range s { + if min == -1 { + min = r + continue + } + + if r < min { + min = r + } + } + + return min +} + +func IndexRune(s []rune, r rune) int { + for i, c := range s { + if c == r { + return i + } + } + return -1 +} + +func IndexLastRune(s []rune, r rune) int { + for i := len(s) - 1; i >= 0; i-- { + if s[i] == r { + return i + } + } + + return -1 +} + +func Equal(a, b []rune) bool { + if len(a) == len(b) { + for i := 0; i < len(a); i++ { + if a[i] != b[i] { + return false + } + } + + return true + } + + return false +} + +// HasPrefix tests whether the string s begins with prefix. +func HasPrefix(s, prefix []rune) bool { + return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix) +} + +// HasSuffix tests whether the string s ends with suffix. +func HasSuffix(s, suffix []rune) bool { + return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix) +} diff --git a/vendor/github.com/gobwas/glob/util/runes/runes_test.go b/vendor/github.com/gobwas/glob/util/runes/runes_test.go new file mode 100644 index 00000000..54498eb8 --- /dev/null +++ b/vendor/github.com/gobwas/glob/util/runes/runes_test.go @@ -0,0 +1,222 @@ +package runes + +import ( + "strings" + "testing" +) + +type indexTest struct { + s []rune + sep []rune + out int +} + +type equalTest struct { + a []rune + b []rune + out bool +} + +func newIndexTest(s, sep string, out int) indexTest { + return indexTest{[]rune(s), []rune(sep), out} +} +func newEqualTest(s, sep string, out bool) equalTest { + return equalTest{[]rune(s), []rune(sep), out} +} + +var dots = "1....2....3....4" + +var indexTests = []indexTest{ + newIndexTest("", "", 0), + newIndexTest("", "a", -1), + newIndexTest("", "foo", -1), + newIndexTest("fo", "foo", -1), + newIndexTest("foo", "foo", 0), + newIndexTest("oofofoofooo", "f", 2), + newIndexTest("oofofoofooo", "foo", 4), + newIndexTest("barfoobarfoo", "foo", 3), + newIndexTest("foo", "", 0), + newIndexTest("foo", "o", 1), + newIndexTest("abcABCabc", "A", 3), + // cases with one byte strings - test special case in Index() + newIndexTest("", "a", -1), + newIndexTest("x", "a", -1), + newIndexTest("x", "x", 0), + newIndexTest("abc", "a", 0), + newIndexTest("abc", "b", 1), + newIndexTest("abc", "c", 2), + newIndexTest("abc", "x", -1), +} + +var lastIndexTests = []indexTest{ + newIndexTest("", "", 0), + newIndexTest("", "a", -1), + newIndexTest("", "foo", -1), + newIndexTest("fo", "foo", -1), + newIndexTest("foo", "foo", 0), + newIndexTest("foo", "f", 0), + newIndexTest("oofofoofooo", "f", 7), + newIndexTest("oofofoofooo", "foo", 7), + newIndexTest("barfoobarfoo", "foo", 9), + newIndexTest("foo", "", 3), + newIndexTest("foo", "o", 2), + newIndexTest("abcABCabc", "A", 3), + newIndexTest("abcABCabc", "a", 6), +} + +var indexAnyTests = []indexTest{ + newIndexTest("", "", -1), + newIndexTest("", "a", -1), + newIndexTest("", "abc", -1), + newIndexTest("a", "", -1), + newIndexTest("a", "a", 0), + newIndexTest("aaa", "a", 0), + newIndexTest("abc", "xyz", -1), + newIndexTest("abc", "xcz", 2), + newIndexTest("a☺b☻c☹d", "uvw☻xyz", 3), + newIndexTest("aRegExp*", ".(|)*+?^$[]", 7), + 
newIndexTest(dots+dots+dots, " ", -1), +} + +// Execute f on each test case. funcName should be the name of f; it's used +// in failure reports. +func runIndexTests(t *testing.T, f func(s, sep []rune) int, funcName string, testCases []indexTest) { + for _, test := range testCases { + actual := f(test.s, test.sep) + if actual != test.out { + t.Errorf("%s(%q,%q) = %v; want %v", funcName, test.s, test.sep, actual, test.out) + } + } +} + +func TestIndex(t *testing.T) { runIndexTests(t, Index, "Index", indexTests) } +func TestLastIndex(t *testing.T) { runIndexTests(t, LastIndex, "LastIndex", lastIndexTests) } +func TestIndexAny(t *testing.T) { runIndexTests(t, IndexAny, "IndexAny", indexAnyTests) } + +var equalTests = []equalTest{ + newEqualTest("a", "a", true), + newEqualTest("a", "b", false), + newEqualTest("a☺b☻c☹d", "uvw☻xyz", false), + newEqualTest("a☺b☻c☹d", "a☺b☻c☹d", true), +} + +func TestEqual(t *testing.T) { + for _, test := range equalTests { + actual := Equal(test.a, test.b) + if actual != test.out { + t.Errorf("Equal(%q,%q) = %v; want %v", test.a, test.b, actual, test.out) + } + } +} + +func BenchmarkLastIndexRunes(b *testing.B) { + r := []rune("abcdef") + n := []rune("cd") + + for i := 0; i < b.N; i++ { + LastIndex(r, n) + } +} +func BenchmarkLastIndexStrings(b *testing.B) { + r := "abcdef" + n := "cd" + + for i := 0; i < b.N; i++ { + strings.LastIndex(r, n) + } +} + +func BenchmarkIndexAnyRunes(b *testing.B) { + s := []rune("...b...") + c := []rune("abc") + + for i := 0; i < b.N; i++ { + IndexAny(s, c) + } +} +func BenchmarkIndexAnyStrings(b *testing.B) { + s := "...b..." + c := "abc" + + for i := 0; i < b.N; i++ { + strings.IndexAny(s, c) + } +} + +func BenchmarkIndexRuneRunes(b *testing.B) { + s := []rune("...b...") + r := 'b' + + for i := 0; i < b.N; i++ { + IndexRune(s, r) + } +} +func BenchmarkIndexRuneStrings(b *testing.B) { + s := "...b..." 
+ r := 'b' + + for i := 0; i < b.N; i++ { + strings.IndexRune(s, r) + } +} + +func BenchmarkIndexRunes(b *testing.B) { + r := []rune("abcdef") + n := []rune("cd") + + for i := 0; i < b.N; i++ { + Index(r, n) + } +} +func BenchmarkIndexStrings(b *testing.B) { + r := "abcdef" + n := "cd" + + for i := 0; i < b.N; i++ { + strings.Index(r, n) + } +} + +func BenchmarkEqualRunes(b *testing.B) { + x := []rune("abc") + y := []rune("abc") + + for i := 0; i < b.N; i++ { + if Equal(x, y) { + continue + } + } +} + +func BenchmarkEqualStrings(b *testing.B) { + x := "abc" + y := "abc" + + for i := 0; i < b.N; i++ { + if x == y { + continue + } + } +} + +func BenchmarkNotEqualRunes(b *testing.B) { + x := []rune("abc") + y := []rune("abcd") + + for i := 0; i < b.N; i++ { + if Equal(x, y) { + continue + } + } +} + +func BenchmarkNotEqualStrings(b *testing.B) { + x := "abc" + y := "abcd" + + for i := 0; i < b.N; i++ { + if x == y { + continue + } + } +} diff --git a/vendor/github.com/gobwas/glob/util/strings/strings.go b/vendor/github.com/gobwas/glob/util/strings/strings.go new file mode 100644 index 00000000..e8ee1920 --- /dev/null +++ b/vendor/github.com/gobwas/glob/util/strings/strings.go @@ -0,0 +1,39 @@ +package strings + +import ( + "strings" + "unicode/utf8" +) + +func IndexAnyRunes(s string, rs []rune) int { + for _, r := range rs { + if i := strings.IndexRune(s, r); i != -1 { + return i + } + } + + return -1 +} + +func LastIndexAnyRunes(s string, rs []rune) int { + for _, r := range rs { + i := -1 + if 0 <= r && r < utf8.RuneSelf { + i = strings.LastIndexByte(s, byte(r)) + } else { + sub := s + for len(sub) > 0 { + j := strings.IndexRune(s, r) + if j == -1 { + break + } + i = j + sub = sub[i+1:] + } + } + if i != -1 { + return i + } + } + return -1 +}
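For reference, the vendored `syntax` package can also be used on its own to inspect how a pattern decomposes: `syntax.Parse` (added above) runs the lexer and parser and returns the root `ast.Node`, whose `String` method prints the tree. A minimal sketch, assuming the package is imported from its vendored path; the pattern is illustrative:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob/syntax"
)

func main() {
	// Parse a pattern into its AST without compiling a matcher.
	node, err := syntax.Parse("api.*.com")
	if err != nil {
		panic(err)
	}
	// Prints something like: Pattern [Text =api., Any, Text =.com]
	fmt.Println(node)
}
```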