Add gobwas

Andrea Fazzi, 5 years ago
parent commit 8826fe2d3b
37 files changed, with 3024 additions and 27 deletions
  1. +40 -0    Godeps/Godeps.json
  2. +8 -0     vendor/github.com/gobwas/glob/.gitignore
  3. +15 -0    vendor/github.com/gobwas/glob/.travis.yml
  4. +21 -0    vendor/github.com/gobwas/glob/LICENSE
  5. +26 -0    vendor/github.com/gobwas/glob/bench.sh
  6. +525 -0   vendor/github.com/gobwas/glob/compiler/compiler.go
  7. +80 -0    vendor/github.com/gobwas/glob/glob.go
  8. +45 -0    vendor/github.com/gobwas/glob/match/any.go
  9. +82 -0    vendor/github.com/gobwas/glob/match/any_of.go
  10. +185 -0  vendor/github.com/gobwas/glob/match/btree.go
  11. +58 -0   vendor/github.com/gobwas/glob/match/contains.go
  12. +99 -0   vendor/github.com/gobwas/glob/match/every_of.go
  13. +49 -0   vendor/github.com/gobwas/glob/match/list.go
  14. +81 -0   vendor/github.com/gobwas/glob/match/match.go
  15. +49 -0   vendor/github.com/gobwas/glob/match/max.go
  16. +57 -0   vendor/github.com/gobwas/glob/match/min.go
  17. +27 -0   vendor/github.com/gobwas/glob/match/nothing.go
  18. +50 -0   vendor/github.com/gobwas/glob/match/prefix.go
  19. +55 -0   vendor/github.com/gobwas/glob/match/prefix_any.go
  20. +62 -0   vendor/github.com/gobwas/glob/match/prefix_suffix.go
  21. +48 -0   vendor/github.com/gobwas/glob/match/range.go
  22. +77 -0   vendor/github.com/gobwas/glob/match/row.go
  23. +91 -0   vendor/github.com/gobwas/glob/match/segments.go
  24. +43 -0   vendor/github.com/gobwas/glob/match/single.go
  25. +35 -0   vendor/github.com/gobwas/glob/match/suffix.go
  26. +43 -0   vendor/github.com/gobwas/glob/match/suffix_any.go
  27. +33 -0   vendor/github.com/gobwas/glob/match/super.go
  28. +45 -0   vendor/github.com/gobwas/glob/match/text.go
  29. +148 -0  vendor/github.com/gobwas/glob/readme.md
  30. +122 -0  vendor/github.com/gobwas/glob/syntax/ast/ast.go
  31. +157 -0  vendor/github.com/gobwas/glob/syntax/ast/parser.go
  32. +273 -0  vendor/github.com/gobwas/glob/syntax/lexer/lexer.go
  33. +88 -0   vendor/github.com/gobwas/glob/syntax/lexer/token.go
  34. +14 -0   vendor/github.com/gobwas/glob/syntax/syntax.go
  35. +154 -0  vendor/github.com/gobwas/glob/util/runes/runes.go
  36. +39 -0   vendor/github.com/gobwas/glob/util/strings/strings.go
  37. +0 -27   vendor/gogs.carducci-dante.gov.it/karmen/util/template/template.go
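
For orientation, here is a minimal sketch of what the newly vendored library provides. The pattern and inputs are illustrative only; they mirror the library's own readme further down and are not taken from this repository's code:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// Compile the pattern once and reuse the resulting matcher.
	g := glob.MustCompile("*.github.com")
	fmt.Println(g.Match("api.github.com")) // true
	fmt.Println(g.Match("github.io"))      // false
}
```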

+ 40 - 0
Godeps/Godeps.json

@@ -17,6 +17,46 @@
 			"Comment": "v1.3-2-g2e00b5c",
 			"Rev": "2e00b5cd70399450106cec6431c2e2ce3cae5034"
 		},
+		{
+			"ImportPath": "github.com/gobwas/glob",
+			"Comment": "v0.2.3-4-ge7a84e9",
+			"Rev": "e7a84e9525fe90abcda167b604e483cc959ad4aa"
+		},
+		{
+			"ImportPath": "github.com/gobwas/glob/compiler",
+			"Comment": "v0.2.3-4-ge7a84e9",
+			"Rev": "e7a84e9525fe90abcda167b604e483cc959ad4aa"
+		},
+		{
+			"ImportPath": "github.com/gobwas/glob/match",
+			"Comment": "v0.2.3-4-ge7a84e9",
+			"Rev": "e7a84e9525fe90abcda167b604e483cc959ad4aa"
+		},
+		{
+			"ImportPath": "github.com/gobwas/glob/syntax",
+			"Comment": "v0.2.3-4-ge7a84e9",
+			"Rev": "e7a84e9525fe90abcda167b604e483cc959ad4aa"
+		},
+		{
+			"ImportPath": "github.com/gobwas/glob/syntax/ast",
+			"Comment": "v0.2.3-4-ge7a84e9",
+			"Rev": "e7a84e9525fe90abcda167b604e483cc959ad4aa"
+		},
+		{
+			"ImportPath": "github.com/gobwas/glob/syntax/lexer",
+			"Comment": "v0.2.3-4-ge7a84e9",
+			"Rev": "e7a84e9525fe90abcda167b604e483cc959ad4aa"
+		},
+		{
+			"ImportPath": "github.com/gobwas/glob/util/runes",
+			"Comment": "v0.2.3-4-ge7a84e9",
+			"Rev": "e7a84e9525fe90abcda167b604e483cc959ad4aa"
+		},
+		{
+			"ImportPath": "github.com/gobwas/glob/util/strings",
+			"Comment": "v0.2.3-4-ge7a84e9",
+			"Rev": "e7a84e9525fe90abcda167b604e483cc959ad4aa"
+		},
 		{
 			"ImportPath": "github.com/gocarina/gocsv",
 			"Rev": "7099e67763c29f812fa2ed7083f32e38be60125a"

+ 8 - 0
vendor/github.com/gobwas/glob/.gitignore

@@ -0,0 +1,8 @@
+glob.iml
+.idea
+*.cpu
+*.mem
+*.test
+*.dot
+*.png
+*.svg

+ 15 - 0
vendor/github.com/gobwas/glob/.travis.yml

@@ -0,0 +1,15 @@
+language: go
+go:
+  - "1.7.X"
+  - "1.8.X"
+  - "1.9.X"
+  - "1.10.X"
+  - master
+
+matrix:
+  allow_failures:
+    - go: master
+fast_finish: true
+
+script:
+  - go test -v ./...

+ 21 - 0
vendor/github.com/gobwas/glob/LICENSE

@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Sergey Kamardin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

+ 26 - 0
vendor/github.com/gobwas/glob/bench.sh

@@ -0,0 +1,26 @@
+#! /bin/bash
+
+bench() {
+    filename="/tmp/$1-$2.bench"
+    if test -e "${filename}";
+    then
+        echo "Already exists ${filename}"
+    else
+        backup=`git rev-parse --abbrev-ref HEAD`
+        git checkout $1
+        echo -n "Creating ${filename}... "
+        go test ./... -run=NONE -bench=$2 > "${filename}" -benchmem
+        echo "OK"
+        git checkout ${backup}
+        sleep 5
+    fi
+}
+
+
+to=$1
+current=`git rev-parse --abbrev-ref HEAD`
+
+bench ${to} $2
+bench ${current} $2
+
+benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench"

+ 525 - 0
vendor/github.com/gobwas/glob/compiler/compiler.go

@@ -0,0 +1,525 @@
+package compiler
+
+// TODO: use a constructor with all matchers, and make their structs private
+// TODO glue multiple Text nodes (like after QuoteMeta)
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/gobwas/glob/match"
+	"github.com/gobwas/glob/syntax/ast"
+	"github.com/gobwas/glob/util/runes"
+)
+
+func optimizeMatcher(matcher match.Matcher) match.Matcher {
+	switch m := matcher.(type) {
+
+	case match.Any:
+		if len(m.Separators) == 0 {
+			return match.NewSuper()
+		}
+
+	case match.AnyOf:
+		if len(m.Matchers) == 1 {
+			return m.Matchers[0]
+		}
+
+		return m
+
+	case match.List:
+		if m.Not == false && len(m.List) == 1 {
+			return match.NewText(string(m.List))
+		}
+
+		return m
+
+	case match.BTree:
+		m.Left = optimizeMatcher(m.Left)
+		m.Right = optimizeMatcher(m.Right)
+
+		r, ok := m.Value.(match.Text)
+		if !ok {
+			return m
+		}
+
+		var (
+			leftNil  = m.Left == nil
+			rightNil = m.Right == nil
+		)
+		if leftNil && rightNil {
+			return match.NewText(r.Str)
+		}
+
+		_, leftSuper := m.Left.(match.Super)
+		lp, leftPrefix := m.Left.(match.Prefix)
+		la, leftAny := m.Left.(match.Any)
+
+		_, rightSuper := m.Right.(match.Super)
+		rs, rightSuffix := m.Right.(match.Suffix)
+		ra, rightAny := m.Right.(match.Any)
+
+		switch {
+		case leftSuper && rightSuper:
+			return match.NewContains(r.Str, false)
+
+		case leftSuper && rightNil:
+			return match.NewSuffix(r.Str)
+
+		case rightSuper && leftNil:
+			return match.NewPrefix(r.Str)
+
+		case leftNil && rightSuffix:
+			return match.NewPrefixSuffix(r.Str, rs.Suffix)
+
+		case rightNil && leftPrefix:
+			return match.NewPrefixSuffix(lp.Prefix, r.Str)
+
+		case rightNil && leftAny:
+			return match.NewSuffixAny(r.Str, la.Separators)
+
+		case leftNil && rightAny:
+			return match.NewPrefixAny(r.Str, ra.Separators)
+		}
+
+		return m
+	}
+
+	return matcher
+}
+
+func compileMatchers(matchers []match.Matcher) (match.Matcher, error) {
+	if len(matchers) == 0 {
+		return nil, fmt.Errorf("compile error: need at least one matcher")
+	}
+	if len(matchers) == 1 {
+		return matchers[0], nil
+	}
+	if m := glueMatchers(matchers); m != nil {
+		return m, nil
+	}
+
+	idx := -1
+	maxLen := -1
+	var val match.Matcher
+	for i, matcher := range matchers {
+		if l := matcher.Len(); l != -1 && l >= maxLen {
+			maxLen = l
+			idx = i
+			val = matcher
+		}
+	}
+
+	if val == nil { // no matcher with a static length was found
+		r, err := compileMatchers(matchers[1:])
+		if err != nil {
+			return nil, err
+		}
+		return match.NewBTree(matchers[0], nil, r), nil
+	}
+
+	left := matchers[:idx]
+	var right []match.Matcher
+	if len(matchers) > idx+1 {
+		right = matchers[idx+1:]
+	}
+
+	var l, r match.Matcher
+	var err error
+	if len(left) > 0 {
+		l, err = compileMatchers(left)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	if len(right) > 0 {
+		r, err = compileMatchers(right)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	return match.NewBTree(val, l, r), nil
+}
+
+func glueMatchers(matchers []match.Matcher) match.Matcher {
+	if m := glueMatchersAsEvery(matchers); m != nil {
+		return m
+	}
+	if m := glueMatchersAsRow(matchers); m != nil {
+		return m
+	}
+	return nil
+}
+
+func glueMatchersAsRow(matchers []match.Matcher) match.Matcher {
+	if len(matchers) <= 1 {
+		return nil
+	}
+
+	var (
+		c []match.Matcher
+		l int
+	)
+	for _, matcher := range matchers {
+		if ml := matcher.Len(); ml == -1 {
+			return nil
+		} else {
+			c = append(c, matcher)
+			l += ml
+		}
+	}
+	return match.NewRow(l, c...)
+}
+
+func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher {
+	if len(matchers) <= 1 {
+		return nil
+	}
+
+	var (
+		hasAny    bool
+		hasSuper  bool
+		hasSingle bool
+		min       int
+		separator []rune
+	)
+
+	for i, matcher := range matchers {
+		var sep []rune
+
+		switch m := matcher.(type) {
+		case match.Super:
+			sep = []rune{}
+			hasSuper = true
+
+		case match.Any:
+			sep = m.Separators
+			hasAny = true
+
+		case match.Single:
+			sep = m.Separators
+			hasSingle = true
+			min++
+
+		case match.List:
+			if !m.Not {
+				return nil
+			}
+			sep = m.List
+			hasSingle = true
+			min++
+
+		default:
+			return nil
+		}
+
+		// initialize
+		if i == 0 {
+			separator = sep
+		}
+
+		if runes.Equal(sep, separator) {
+			continue
+		}
+
+		return nil
+	}
+
+	if hasSuper && !hasAny && !hasSingle {
+		return match.NewSuper()
+	}
+
+	if hasAny && !hasSuper && !hasSingle {
+		return match.NewAny(separator)
+	}
+
+	if (hasAny || hasSuper) && min > 0 && len(separator) == 0 {
+		return match.NewMin(min)
+	}
+
+	every := match.NewEveryOf()
+
+	if min > 0 {
+		every.Add(match.NewMin(min))
+
+		if !hasAny && !hasSuper {
+			every.Add(match.NewMax(min))
+		}
+	}
+
+	if len(separator) > 0 {
+		every.Add(match.NewContains(string(separator), true))
+	}
+
+	return every
+}
+
+func minimizeMatchers(matchers []match.Matcher) []match.Matcher {
+	var done match.Matcher
+	var left, right, count int
+
+	for l := 0; l < len(matchers); l++ {
+		for r := len(matchers); r > l; r-- {
+			if glued := glueMatchers(matchers[l:r]); glued != nil {
+				var swap bool
+
+				if done == nil {
+					swap = true
+				} else {
+					cl, gl := done.Len(), glued.Len()
+					swap = cl > -1 && gl > -1 && gl > cl
+					swap = swap || count < r-l
+				}
+
+				if swap {
+					done = glued
+					left = l
+					right = r
+					count = r - l
+				}
+			}
+		}
+	}
+
+	if done == nil {
+		return matchers
+	}
+
+	next := append(append([]match.Matcher{}, matchers[:left]...), done)
+	if right < len(matchers) {
+		next = append(next, matchers[right:]...)
+	}
+
+	if len(next) == len(matchers) {
+		return next
+	}
+
+	return minimizeMatchers(next)
+}
+
+// minimizeTree tries to apply some heuristics to minimize the number of nodes in the given tree
+func minimizeTree(tree *ast.Node) *ast.Node {
+	switch tree.Kind {
+	case ast.KindAnyOf:
+		return minimizeTreeAnyOf(tree)
+	default:
+		return nil
+	}
+}
+
+// minimizeTreeAnyOf tries to find common children of the given AnyOf node.
+// It searches for common children from the left and from the right;
+// if any common children are found, it returns a new optimized AST tree,
+// otherwise it returns nil.
+func minimizeTreeAnyOf(tree *ast.Node) *ast.Node {
+	if !areOfSameKind(tree.Children, ast.KindPattern) {
+		return nil
+	}
+
+	commonLeft, commonRight := commonChildren(tree.Children)
+	commonLeftCount, commonRightCount := len(commonLeft), len(commonRight)
+	if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts
+		return nil
+	}
+
+	var result []*ast.Node
+	if commonLeftCount > 0 {
+		result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...))
+	}
+
+	var anyOf []*ast.Node
+	for _, child := range tree.Children {
+		reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount]
+		var node *ast.Node
+		if len(reuse) == 0 {
+			// this pattern is completely reduced by the commonLeft and commonRight patterns,
+			// so it becomes nothing
+			node = ast.NewNode(ast.KindNothing, nil)
+		} else {
+			node = ast.NewNode(ast.KindPattern, nil, reuse...)
+		}
+		anyOf = appendIfUnique(anyOf, node)
+	}
+	switch {
+	case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing:
+		result = append(result, anyOf[0])
+	case len(anyOf) > 1:
+		result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...))
+	}
+
+	if commonRightCount > 0 {
+		result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...))
+	}
+
+	return ast.NewNode(ast.KindPattern, nil, result...)
+}
+
+func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) {
+	if len(nodes) <= 1 {
+		return
+	}
+
+	// find node that has least number of children
+	idx := leastChildren(nodes)
+	if idx == -1 {
+		return
+	}
+	tree := nodes[idx]
+	treeLength := len(tree.Children)
+
+	// allocate commonRight at its maximum possible size
+	// so that elements can be inserted in reverse order (from end to start)
+	// without sorting
+	commonRight = make([]*ast.Node, treeLength)
+	lastRight := treeLength // will use this to get results as commonRight[lastRight:]
+
+	var (
+		breakLeft   bool
+		breakRight  bool
+		commonTotal int
+	)
+	for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 {
+		treeLeft := tree.Children[i]
+		treeRight := tree.Children[j]
+
+		for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ {
+			// skip least children node
+			if k == idx {
+				continue
+			}
+
+			restLeft := nodes[k].Children[i]
+			restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength]
+
+			breakLeft = breakLeft || !treeLeft.Equal(restLeft)
+
+			// disable searching for right common parts, if left part is already overlapping
+			breakRight = breakRight || (!breakLeft && j <= i)
+			breakRight = breakRight || !treeRight.Equal(restRight)
+		}
+
+		if !breakLeft {
+			commonTotal++
+			commonLeft = append(commonLeft, treeLeft)
+		}
+		if !breakRight {
+			commonTotal++
+			lastRight = j
+			commonRight[j] = treeRight
+		}
+	}
+
+	commonRight = commonRight[lastRight:]
+
+	return
+}
+
+func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node {
+	for _, n := range target {
+		if reflect.DeepEqual(n, val) {
+			return target
+		}
+	}
+	return append(target, val)
+}
+
+func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool {
+	for _, n := range nodes {
+		if n.Kind != kind {
+			return false
+		}
+	}
+	return true
+}
+
+func leastChildren(nodes []*ast.Node) int {
+	min := -1
+	idx := -1
+	for i, n := range nodes {
+		if idx == -1 || (len(n.Children) < min) {
+			min = len(n.Children)
+			idx = i
+		}
+	}
+	return idx
+}
+
+func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) {
+	var matchers []match.Matcher
+	for _, desc := range tree.Children {
+		m, err := compile(desc, sep)
+		if err != nil {
+			return nil, err
+		}
+		matchers = append(matchers, optimizeMatcher(m))
+	}
+	return matchers, nil
+}
+
+func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) {
+	switch tree.Kind {
+	case ast.KindAnyOf:
+		// todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go)
+		if n := minimizeTree(tree); n != nil {
+			return compile(n, sep)
+		}
+		matchers, err := compileTreeChildren(tree, sep)
+		if err != nil {
+			return nil, err
+		}
+		return match.NewAnyOf(matchers...), nil
+
+	case ast.KindPattern:
+		if len(tree.Children) == 0 {
+			return match.NewNothing(), nil
+		}
+		matchers, err := compileTreeChildren(tree, sep)
+		if err != nil {
+			return nil, err
+		}
+		m, err = compileMatchers(minimizeMatchers(matchers))
+		if err != nil {
+			return nil, err
+		}
+
+	case ast.KindAny:
+		m = match.NewAny(sep)
+
+	case ast.KindSuper:
+		m = match.NewSuper()
+
+	case ast.KindSingle:
+		m = match.NewSingle(sep)
+
+	case ast.KindNothing:
+		m = match.NewNothing()
+
+	case ast.KindList:
+		l := tree.Value.(ast.List)
+		m = match.NewList([]rune(l.Chars), l.Not)
+
+	case ast.KindRange:
+		r := tree.Value.(ast.Range)
+		m = match.NewRange(r.Lo, r.Hi, r.Not)
+
+	case ast.KindText:
+		t := tree.Value.(ast.Text)
+		m = match.NewText(t.Text)
+
+	default:
+		return nil, fmt.Errorf("could not compile tree: unknown node type")
+	}
+
+	return optimizeMatcher(m), nil
+}
+
+func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) {
+	m, err := compile(tree, sep)
+	if err != nil {
+		return nil, err
+	}
+
+	return m, nil
+}

+ 80 - 0
vendor/github.com/gobwas/glob/glob.go

@@ -0,0 +1,80 @@
+package glob
+
+import (
+	"github.com/gobwas/glob/compiler"
+	"github.com/gobwas/glob/syntax"
+)
+
+// Glob represents compiled glob pattern.
+type Glob interface {
+	Match(string) bool
+}
+
+// Compile creates a Glob for the given pattern, using any runes passed after the pattern as separators.
+// The pattern syntax is:
+//
+//    pattern:
+//        { term }
+//
+//    term:
+//        `*`         matches any sequence of non-separator characters
+//        `**`        matches any sequence of characters
+//        `?`         matches any single non-separator character
+//        `[` [ `!` ] { character-range } `]`
+//                    character class (must be non-empty)
+//        `{` pattern-list `}`
+//                    pattern alternatives
+//        c           matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`)
+//        `\` c       matches character c
+//
+//    character-range:
+//        c           matches character c (c != `\\`, `-`, `]`)
+//        `\` c       matches character c
+//        lo `-` hi   matches character c for lo <= c <= hi
+//
+//    pattern-list:
+//        pattern { `,` pattern }
+//                    comma-separated (without spaces) patterns
+//
+func Compile(pattern string, separators ...rune) (Glob, error) {
+	ast, err := syntax.Parse(pattern)
+	if err != nil {
+		return nil, err
+	}
+
+	matcher, err := compiler.Compile(ast, separators)
+	if err != nil {
+		return nil, err
+	}
+
+	return matcher, nil
+}
+
+// MustCompile is the same as Compile, except that it panics if Compile returns an error.
+func MustCompile(pattern string, separators ...rune) Glob {
+	g, err := Compile(pattern, separators...)
+	if err != nil {
+		panic(err)
+	}
+
+	return g
+}
+
+// QuoteMeta returns a string that quotes all glob pattern meta characters
+// inside the argument text. For example, QuoteMeta(`{foo*}`) returns `\{foo\*\}`.
+func QuoteMeta(s string) string {
+	b := make([]byte, 2*len(s))
+
+	// a byte loop is correct because all meta characters are ASCII
+	j := 0
+	for i := 0; i < len(s); i++ {
+		if syntax.Special(s[i]) {
+			b[j] = '\\'
+			j++
+		}
+		b[j] = s[i]
+		j++
+	}
+
+	return string(b[0:j])
+}
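
As a quick illustration of how QuoteMeta and Compile from the file above combine, here is a hedged sketch; the input string is made up, and the quoted output follows from the specials table in syntax/lexer:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// Escape the glob meta characters so the string is matched literally,
	// then compile the escaped pattern.
	quoted := glob.QuoteMeta("{foo*}") // `\{foo\*\}`
	g := glob.MustCompile(quoted)
	fmt.Println(g.Match("{foo*}")) // true
	fmt.Println(g.Match("foo"))    // false
}
```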

+ 45 - 0
vendor/github.com/gobwas/glob/match/any.go

@@ -0,0 +1,45 @@
+package match
+
+import (
+	"fmt"
+	"github.com/gobwas/glob/util/strings"
+)
+
+type Any struct {
+	Separators []rune
+}
+
+func NewAny(s []rune) Any {
+	return Any{s}
+}
+
+func (self Any) Match(s string) bool {
+	return strings.IndexAnyRunes(s, self.Separators) == -1
+}
+
+func (self Any) Index(s string) (int, []int) {
+	found := strings.IndexAnyRunes(s, self.Separators)
+	switch found {
+	case -1:
+	case 0:
+		return 0, segments0
+	default:
+		s = s[:found]
+	}
+
+	segments := acquireSegments(len(s))
+	for i := range s {
+		segments = append(segments, i)
+	}
+	segments = append(segments, len(s))
+
+	return 0, segments
+}
+
+func (self Any) Len() int {
+	return lenNo
+}
+
+func (self Any) String() string {
+	return fmt.Sprintf("<any:![%s]>", string(self.Separators))
+}

+ 82 - 0
vendor/github.com/gobwas/glob/match/any_of.go

@@ -0,0 +1,82 @@
+package match
+
+import "fmt"
+
+type AnyOf struct {
+	Matchers Matchers
+}
+
+func NewAnyOf(m ...Matcher) AnyOf {
+	return AnyOf{Matchers(m)}
+}
+
+func (self *AnyOf) Add(m Matcher) error {
+	self.Matchers = append(self.Matchers, m)
+	return nil
+}
+
+func (self AnyOf) Match(s string) bool {
+	for _, m := range self.Matchers {
+		if m.Match(s) {
+			return true
+		}
+	}
+
+	return false
+}
+
+func (self AnyOf) Index(s string) (int, []int) {
+	index := -1
+
+	segments := acquireSegments(len(s))
+	for _, m := range self.Matchers {
+		idx, seg := m.Index(s)
+		if idx == -1 {
+			continue
+		}
+
+		if index == -1 || idx < index {
+			index = idx
+			segments = append(segments[:0], seg...)
+			continue
+		}
+
+		if idx > index {
+			continue
+		}
+
+		// here idx == index
+		segments = appendMerge(segments, seg)
+	}
+
+	if index == -1 {
+		releaseSegments(segments)
+		return -1, nil
+	}
+
+	return index, segments
+}
+
+func (self AnyOf) Len() (l int) {
+	l = -1
+	for _, m := range self.Matchers {
+		ml := m.Len()
+		switch {
+		case l == -1:
+			l = ml
+			continue
+
+		case ml == -1:
+			return -1
+
+		case l != ml:
+			return -1
+		}
+	}
+
+	return
+}
+
+func (self AnyOf) String() string {
+	return fmt.Sprintf("<any_of:[%s]>", self.Matchers)
+}

+ 185 - 0
vendor/github.com/gobwas/glob/match/btree.go

@@ -0,0 +1,185 @@
+package match
+
+import (
+	"fmt"
+	"unicode/utf8"
+)
+
+type BTree struct {
+	Value            Matcher
+	Left             Matcher
+	Right            Matcher
+	ValueLengthRunes int
+	LeftLengthRunes  int
+	RightLengthRunes int
+	LengthRunes      int
+}
+
+func NewBTree(Value, Left, Right Matcher) (tree BTree) {
+	tree.Value = Value
+	tree.Left = Left
+	tree.Right = Right
+
+	lenOk := true
+	if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes == -1 {
+		lenOk = false
+	}
+
+	if Left != nil {
+		if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 {
+			lenOk = false
+		}
+	}
+
+	if Right != nil {
+		if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 {
+			lenOk = false
+		}
+	}
+
+	if lenOk {
+		tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes
+	} else {
+		tree.LengthRunes = -1
+	}
+
+	return tree
+}
+
+func (self BTree) Len() int {
+	return self.LengthRunes
+}
+
+// todo?
+func (self BTree) Index(s string) (index int, segments []int) {
+	//inputLen := len(s)
+	//// try to cut unnecessary parts
+	//// by knowledge of length of right and left part
+	//offset, limit := self.offsetLimit(inputLen)
+	//for offset < limit {
+	//	// search for matching part in substring
+	//	vi, segments := self.Value.Index(s[offset:limit])
+	//	if index == -1 {
+	//		return -1, nil
+	//	}
+	//	if self.Left == nil {
+	//		if index != offset {
+	//			return -1, nil
+	//		}
+	//	} else {
+	//		left := s[:offset+vi]
+	//		i := self.Left.IndexSuffix(left)
+	//		if i == -1 {
+	//			return -1, nil
+	//		}
+	//		index = i
+	//	}
+	//	if self.Right != nil {
+	//		for _, seg := range segments {
+	//			right := s[:offset+vi+seg]
+	//		}
+	//	}
+
+	//	l := s[:offset+index]
+	//	var left bool
+	//	if self.Left != nil {
+	//		left = self.Left.Index(l)
+	//	} else {
+	//		left = l == ""
+	//	}
+	//}
+
+	return -1, nil
+}
+
+func (self BTree) Match(s string) bool {
+	inputLen := len(s)
+	// try to cut unnecessary parts
+	// by knowledge of length of right and left part
+	offset, limit := self.offsetLimit(inputLen)
+
+	for offset < limit {
+		// search for matching part in substring
+		index, segments := self.Value.Index(s[offset:limit])
+		if index == -1 {
+			releaseSegments(segments)
+			return false
+		}
+
+		l := s[:offset+index]
+		var left bool
+		if self.Left != nil {
+			left = self.Left.Match(l)
+		} else {
+			left = l == ""
+		}
+
+		if left {
+			for i := len(segments) - 1; i >= 0; i-- {
+				length := segments[i]
+
+				var right bool
+				var r string
+				// if there is no string for the right branch
+				if inputLen <= offset+index+length {
+					r = ""
+				} else {
+					r = s[offset+index+length:]
+				}
+
+				if self.Right != nil {
+					right = self.Right.Match(r)
+				} else {
+					right = r == ""
+				}
+
+				if right {
+					releaseSegments(segments)
+					return true
+				}
+			}
+		}
+
+		_, step := utf8.DecodeRuneInString(s[offset+index:])
+		offset += index + step
+
+		releaseSegments(segments)
+	}
+
+	return false
+}
+
+func (self BTree) offsetLimit(inputLen int) (offset int, limit int) {
+	// self.LengthRunes, self.LeftLengthRunes and self.RightLengthRunes hold the length in runes of each part.
+	// Here we manipulate byte lengths for better optimization,
+	// but the checks still hold, because the minimum byte length of a 1-rune string is 1 byte.
+	if self.LengthRunes != -1 && self.LengthRunes > inputLen {
+		return 0, 0
+	}
+	if self.LeftLengthRunes >= 0 {
+		offset = self.LeftLengthRunes
+	}
+	if self.RightLengthRunes >= 0 {
+		limit = inputLen - self.RightLengthRunes
+	} else {
+		limit = inputLen
+	}
+	return offset, limit
+}
+
+func (self BTree) String() string {
+	const n string = "<nil>"
+	var l, r string
+	if self.Left == nil {
+		l = n
+	} else {
+		l = self.Left.String()
+	}
+	if self.Right == nil {
+		r = n
+	} else {
+		r = self.Right.String()
+	}
+
+	return fmt.Sprintf("<btree:[%s<-%s->%s]>", l, self.Value, r)
+}

+ 58 - 0
vendor/github.com/gobwas/glob/match/contains.go

@@ -0,0 +1,58 @@
+package match
+
+import (
+	"fmt"
+	"strings"
+)
+
+type Contains struct {
+	Needle string
+	Not    bool
+}
+
+func NewContains(needle string, not bool) Contains {
+	return Contains{needle, not}
+}
+
+func (self Contains) Match(s string) bool {
+	return strings.Contains(s, self.Needle) != self.Not
+}
+
+func (self Contains) Index(s string) (int, []int) {
+	var offset int
+
+	idx := strings.Index(s, self.Needle)
+
+	if !self.Not {
+		if idx == -1 {
+			return -1, nil
+		}
+
+		offset = idx + len(self.Needle)
+		if len(s) <= offset {
+			return 0, []int{offset}
+		}
+		s = s[offset:]
+	} else if idx != -1 {
+		s = s[:idx]
+	}
+
+	segments := acquireSegments(len(s) + 1)
+	for i := range s {
+		segments = append(segments, offset+i)
+	}
+
+	return 0, append(segments, offset+len(s))
+}
+
+func (self Contains) Len() int {
+	return lenNo
+}
+
+func (self Contains) String() string {
+	var not string
+	if self.Not {
+		not = "!"
+	}
+	return fmt.Sprintf("<contains:%s[%s]>", not, self.Needle)
+}

+ 99 - 0
vendor/github.com/gobwas/glob/match/every_of.go

@@ -0,0 +1,99 @@
+package match
+
+import (
+	"fmt"
+)
+
+type EveryOf struct {
+	Matchers Matchers
+}
+
+func NewEveryOf(m ...Matcher) EveryOf {
+	return EveryOf{Matchers(m)}
+}
+
+func (self *EveryOf) Add(m Matcher) error {
+	self.Matchers = append(self.Matchers, m)
+	return nil
+}
+
+func (self EveryOf) Len() (l int) {
+	for _, m := range self.Matchers {
+		if ml := m.Len(); l > 0 {
+			l += ml
+		} else {
+			return -1
+		}
+	}
+
+	return
+}
+
+func (self EveryOf) Index(s string) (int, []int) {
+	var index int
+	var offset int
+
+	// allocate the segment buffers with capacity len(s),
+	// since that is the maximum possible number of output segment values
+	next := acquireSegments(len(s))
+	current := acquireSegments(len(s))
+
+	sub := s
+	for i, m := range self.Matchers {
+		idx, seg := m.Index(sub)
+		if idx == -1 {
+			releaseSegments(next)
+			releaseSegments(current)
+			return -1, nil
+		}
+
+		if i == 0 {
+			// copy the segments instead of assigning `current = seg`,
+			// because seg is a slice backed by a reusable buffer
+			// and could be overwritten in the next iteration
+			current = append(current, seg...)
+		} else {
+			// clear the next
+			next = next[:0]
+
+			delta := index - (idx + offset)
+			for _, ex := range current {
+				for _, n := range seg {
+					if ex+delta == n {
+						next = append(next, n)
+					}
+				}
+			}
+
+			if len(next) == 0 {
+				releaseSegments(next)
+				releaseSegments(current)
+				return -1, nil
+			}
+
+			current = append(current[:0], next...)
+		}
+
+		index = idx + offset
+		sub = s[index:]
+		offset += idx
+	}
+
+	releaseSegments(next)
+
+	return index, current
+}
+
+func (self EveryOf) Match(s string) bool {
+	for _, m := range self.Matchers {
+		if !m.Match(s) {
+			return false
+		}
+	}
+
+	return true
+}
+
+func (self EveryOf) String() string {
+	return fmt.Sprintf("<every_of:[%s]>", self.Matchers)
+}

+ 49 - 0
vendor/github.com/gobwas/glob/match/list.go

@@ -0,0 +1,49 @@
+package match
+
+import (
+	"fmt"
+	"github.com/gobwas/glob/util/runes"
+	"unicode/utf8"
+)
+
+type List struct {
+	List []rune
+	Not  bool
+}
+
+func NewList(list []rune, not bool) List {
+	return List{list, not}
+}
+
+func (self List) Match(s string) bool {
+	r, w := utf8.DecodeRuneInString(s)
+	if len(s) > w {
+		return false
+	}
+
+	inList := runes.IndexRune(self.List, r) != -1
+	return inList == !self.Not
+}
+
+func (self List) Len() int {
+	return lenOne
+}
+
+func (self List) Index(s string) (int, []int) {
+	for i, r := range s {
+		if self.Not == (runes.IndexRune(self.List, r) == -1) {
+			return i, segmentsByRuneLength[utf8.RuneLen(r)]
+		}
+	}
+
+	return -1, nil
+}
+
+func (self List) String() string {
+	var not string
+	if self.Not {
+		not = "!"
+	}
+
+	return fmt.Sprintf("<list:%s[%s]>", not, string(self.List))
+}

+ 81 - 0
vendor/github.com/gobwas/glob/match/match.go

@@ -0,0 +1,81 @@
+package match
+
+// todo common table of rune's length
+
+import (
+	"fmt"
+	"strings"
+)
+
+const lenOne = 1
+const lenZero = 0
+const lenNo = -1
+
+type Matcher interface {
+	Match(string) bool
+	Index(string) (int, []int)
+	Len() int
+	String() string
+}
+
+type Matchers []Matcher
+
+func (m Matchers) String() string {
+	var s []string
+	for _, matcher := range m {
+		s = append(s, fmt.Sprint(matcher))
+	}
+
+	return fmt.Sprintf("%s", strings.Join(s, ","))
+}
+
+// appendMerge merges and sorts given already SORTED and UNIQUE segments.
+func appendMerge(target, sub []int) []int {
+	lt, ls := len(target), len(sub)
+	out := make([]int, 0, lt+ls)
+
+	for x, y := 0, 0; x < lt || y < ls; {
+		if x >= lt {
+			out = append(out, sub[y:]...)
+			break
+		}
+
+		if y >= ls {
+			out = append(out, target[x:]...)
+			break
+		}
+
+		xValue := target[x]
+		yValue := sub[y]
+
+		switch {
+
+		case xValue == yValue:
+			out = append(out, xValue)
+			x++
+			y++
+
+		case xValue < yValue:
+			out = append(out, xValue)
+			x++
+
+		case yValue < xValue:
+			out = append(out, yValue)
+			y++
+
+		}
+	}
+
+	target = append(target[:0], out...)
+
+	return target
+}
+
+func reverseSegments(input []int) {
+	l := len(input)
+	m := l / 2
+
+	for i := 0; i < m; i++ {
+		input[i], input[l-i-1] = input[l-i-1], input[i]
+	}
+}

+ 49 - 0
vendor/github.com/gobwas/glob/match/max.go

@@ -0,0 +1,49 @@
+package match
+
+import (
+	"fmt"
+	"unicode/utf8"
+)
+
+type Max struct {
+	Limit int
+}
+
+func NewMax(l int) Max {
+	return Max{l}
+}
+
+func (self Max) Match(s string) bool {
+	var l int
+	for range s {
+		l += 1
+		if l > self.Limit {
+			return false
+		}
+	}
+
+	return true
+}
+
+func (self Max) Index(s string) (int, []int) {
+	segments := acquireSegments(self.Limit + 1)
+	segments = append(segments, 0)
+	var count int
+	for i, r := range s {
+		count++
+		if count > self.Limit {
+			break
+		}
+		segments = append(segments, i+utf8.RuneLen(r))
+	}
+
+	return 0, segments
+}
+
+func (self Max) Len() int {
+	return lenNo
+}
+
+func (self Max) String() string {
+	return fmt.Sprintf("<max:%d>", self.Limit)
+}

+ 57 - 0
vendor/github.com/gobwas/glob/match/min.go

@@ -0,0 +1,57 @@
+package match
+
+import (
+	"fmt"
+	"unicode/utf8"
+)
+
+type Min struct {
+	Limit int
+}
+
+func NewMin(l int) Min {
+	return Min{l}
+}
+
+func (self Min) Match(s string) bool {
+	var l int
+	for range s {
+		l += 1
+		if l >= self.Limit {
+			return true
+		}
+	}
+
+	return false
+}
+
+func (self Min) Index(s string) (int, []int) {
+	var count int
+
+	c := len(s) - self.Limit + 1
+	if c <= 0 {
+		return -1, nil
+	}
+
+	segments := acquireSegments(c)
+	for i, r := range s {
+		count++
+		if count >= self.Limit {
+			segments = append(segments, i+utf8.RuneLen(r))
+		}
+	}
+
+	if len(segments) == 0 {
+		return -1, nil
+	}
+
+	return 0, segments
+}
+
+func (self Min) Len() int {
+	return lenNo
+}
+
+func (self Min) String() string {
+	return fmt.Sprintf("<min:%d>", self.Limit)
+}

+ 27 - 0
vendor/github.com/gobwas/glob/match/nothing.go

@@ -0,0 +1,27 @@
+package match
+
+import (
+	"fmt"
+)
+
+type Nothing struct{}
+
+func NewNothing() Nothing {
+	return Nothing{}
+}
+
+func (self Nothing) Match(s string) bool {
+	return len(s) == 0
+}
+
+func (self Nothing) Index(s string) (int, []int) {
+	return 0, segments0
+}
+
+func (self Nothing) Len() int {
+	return lenZero
+}
+
+func (self Nothing) String() string {
+	return fmt.Sprintf("<nothing>")
+}

+ 50 - 0
vendor/github.com/gobwas/glob/match/prefix.go

@@ -0,0 +1,50 @@
+package match
+
+import (
+	"fmt"
+	"strings"
+	"unicode/utf8"
+)
+
+type Prefix struct {
+	Prefix string
+}
+
+func NewPrefix(p string) Prefix {
+	return Prefix{p}
+}
+
+func (self Prefix) Index(s string) (int, []int) {
+	idx := strings.Index(s, self.Prefix)
+	if idx == -1 {
+		return -1, nil
+	}
+
+	length := len(self.Prefix)
+	var sub string
+	if len(s) > idx+length {
+		sub = s[idx+length:]
+	} else {
+		sub = ""
+	}
+
+	segments := acquireSegments(len(sub) + 1)
+	segments = append(segments, length)
+	for i, r := range sub {
+		segments = append(segments, length+i+utf8.RuneLen(r))
+	}
+
+	return idx, segments
+}
+
+func (self Prefix) Len() int {
+	return lenNo
+}
+
+func (self Prefix) Match(s string) bool {
+	return strings.HasPrefix(s, self.Prefix)
+}
+
+func (self Prefix) String() string {
+	return fmt.Sprintf("<prefix:%s>", self.Prefix)
+}

+ 55 - 0
vendor/github.com/gobwas/glob/match/prefix_any.go

@@ -0,0 +1,55 @@
+package match
+
+import (
+	"fmt"
+	"strings"
+	"unicode/utf8"
+
+	sutil "github.com/gobwas/glob/util/strings"
+)
+
+type PrefixAny struct {
+	Prefix     string
+	Separators []rune
+}
+
+func NewPrefixAny(s string, sep []rune) PrefixAny {
+	return PrefixAny{s, sep}
+}
+
+func (self PrefixAny) Index(s string) (int, []int) {
+	idx := strings.Index(s, self.Prefix)
+	if idx == -1 {
+		return -1, nil
+	}
+
+	n := len(self.Prefix)
+	sub := s[idx+n:]
+	i := sutil.IndexAnyRunes(sub, self.Separators)
+	if i > -1 {
+		sub = sub[:i]
+	}
+
+	seg := acquireSegments(len(sub) + 1)
+	seg = append(seg, n)
+	for i, r := range sub {
+		seg = append(seg, n+i+utf8.RuneLen(r))
+	}
+
+	return idx, seg
+}
+
+func (self PrefixAny) Len() int {
+	return lenNo
+}
+
+func (self PrefixAny) Match(s string) bool {
+	if !strings.HasPrefix(s, self.Prefix) {
+		return false
+	}
+	return sutil.IndexAnyRunes(s[len(self.Prefix):], self.Separators) == -1
+}
+
+func (self PrefixAny) String() string {
+	return fmt.Sprintf("<prefix_any:%s![%s]>", self.Prefix, string(self.Separators))
+}

+ 62 - 0
vendor/github.com/gobwas/glob/match/prefix_suffix.go

@@ -0,0 +1,62 @@
+package match
+
+import (
+	"fmt"
+	"strings"
+)
+
+type PrefixSuffix struct {
+	Prefix, Suffix string
+}
+
+func NewPrefixSuffix(p, s string) PrefixSuffix {
+	return PrefixSuffix{p, s}
+}
+
+func (self PrefixSuffix) Index(s string) (int, []int) {
+	prefixIdx := strings.Index(s, self.Prefix)
+	if prefixIdx == -1 {
+		return -1, nil
+	}
+
+	suffixLen := len(self.Suffix)
+	if suffixLen <= 0 {
+		return prefixIdx, []int{len(s) - prefixIdx}
+	}
+
+	if (len(s) - prefixIdx) <= 0 {
+		return -1, nil
+	}
+
+	segments := acquireSegments(len(s) - prefixIdx)
+	for sub := s[prefixIdx:]; ; {
+		suffixIdx := strings.LastIndex(sub, self.Suffix)
+		if suffixIdx == -1 {
+			break
+		}
+
+		segments = append(segments, suffixIdx+suffixLen)
+		sub = sub[:suffixIdx]
+	}
+
+	if len(segments) == 0 {
+		releaseSegments(segments)
+		return -1, nil
+	}
+
+	reverseSegments(segments)
+
+	return prefixIdx, segments
+}
+
+func (self PrefixSuffix) Len() int {
+	return lenNo
+}
+
+func (self PrefixSuffix) Match(s string) bool {
+	return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix)
+}
+
+func (self PrefixSuffix) String() string {
+	return fmt.Sprintf("<prefix_suffix:[%s,%s]>", self.Prefix, self.Suffix)
+}

+ 48 - 0
vendor/github.com/gobwas/glob/match/range.go

@@ -0,0 +1,48 @@
+package match
+
+import (
+	"fmt"
+	"unicode/utf8"
+)
+
+type Range struct {
+	Lo, Hi rune
+	Not    bool
+}
+
+func NewRange(lo, hi rune, not bool) Range {
+	return Range{lo, hi, not}
+}
+
+func (self Range) Len() int {
+	return lenOne
+}
+
+func (self Range) Match(s string) bool {
+	r, w := utf8.DecodeRuneInString(s)
+	if len(s) > w {
+		return false
+	}
+
+	inRange := r >= self.Lo && r <= self.Hi
+
+	return inRange == !self.Not
+}
+
+func (self Range) Index(s string) (int, []int) {
+	for i, r := range s {
+		if self.Not != (r >= self.Lo && r <= self.Hi) {
+			return i, segmentsByRuneLength[utf8.RuneLen(r)]
+		}
+	}
+
+	return -1, nil
+}
+
+func (self Range) String() string {
+	var not string
+	if self.Not {
+		not = "!"
+	}
+	return fmt.Sprintf("<range:%s[%s,%s]>", not, string(self.Lo), string(self.Hi))
+}

+ 77 - 0
vendor/github.com/gobwas/glob/match/row.go

@@ -0,0 +1,77 @@
+package match
+
+import (
+	"fmt"
+)
+
+type Row struct {
+	Matchers    Matchers
+	RunesLength int
+	Segments    []int
+}
+
+func NewRow(len int, m ...Matcher) Row {
+	return Row{
+		Matchers:    Matchers(m),
+		RunesLength: len,
+		Segments:    []int{len},
+	}
+}
+
+func (self Row) matchAll(s string) bool {
+	var idx int
+	for _, m := range self.Matchers {
+		length := m.Len()
+
+		var next, i int
+		for next = range s[idx:] {
+			i++
+			if i == length {
+				break
+			}
+		}
+
+		if i < length || !m.Match(s[idx:idx+next+1]) {
+			return false
+		}
+
+		idx += next + 1
+	}
+
+	return true
+}
+
+func (self Row) lenOk(s string) bool {
+	var i int
+	for range s {
+		i++
+		if i > self.RunesLength {
+			return false
+		}
+	}
+	return self.RunesLength == i
+}
+
+func (self Row) Match(s string) bool {
+	return self.lenOk(s) && self.matchAll(s)
+}
+
+func (self Row) Len() (l int) {
+	return self.RunesLength
+}
+
+func (self Row) Index(s string) (int, []int) {
+	for i := range s {
+		if len(s[i:]) < self.RunesLength {
+			break
+		}
+		if self.matchAll(s[i:]) {
+			return i, self.Segments
+		}
+	}
+	return -1, nil
+}
+
+func (self Row) String() string {
+	return fmt.Sprintf("<row_%d:[%s]>", self.RunesLength, self.Matchers)
+}

+ 91 - 0
vendor/github.com/gobwas/glob/match/segments.go

@@ -0,0 +1,91 @@
+package match
+
+import (
+	"sync"
+)
+
+type SomePool interface {
+	Get() []int
+	Put([]int)
+}
+
+var segmentsPools [1024]sync.Pool
+
+func toPowerOfTwo(v int) int {
+	v--
+	v |= v >> 1
+	v |= v >> 2
+	v |= v >> 4
+	v |= v >> 8
+	v |= v >> 16
+	v++
+
+	return v
+}
+
+const (
+	cacheFrom             = 16
+	cacheToAndHigher      = 1024
+	cacheFromIndex        = 15
+	cacheToAndHigherIndex = 1023
+)
+
+var (
+	segments0 = []int{0}
+	segments1 = []int{1}
+	segments2 = []int{2}
+	segments3 = []int{3}
+	segments4 = []int{4}
+)
+
+var segmentsByRuneLength [5][]int = [5][]int{
+	0: segments0,
+	1: segments1,
+	2: segments2,
+	3: segments3,
+	4: segments4,
+}
+
+func init() {
+	for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 {
+		func(i int) {
+			segmentsPools[i-1] = sync.Pool{New: func() interface{} {
+				return make([]int, 0, i)
+			}}
+		}(i)
+	}
+}
+
+func getTableIndex(c int) int {
+	p := toPowerOfTwo(c)
+	switch {
+	case p >= cacheToAndHigher:
+		return cacheToAndHigherIndex
+	case p <= cacheFrom:
+		return cacheFromIndex
+	default:
+		return p - 1
+	}
+}
+
+func acquireSegments(c int) []int {
+	// make []int with less capacity than cacheFrom
+	// is faster than acquiring it from pool
+	if c < cacheFrom {
+		return make([]int, 0, c)
+	}
+
+	return segmentsPools[getTableIndex(c)].Get().([]int)[:0]
+}
+
+func releaseSegments(s []int) {
+	c := cap(s)
+
+	// make []int with less capacity than cacheFrom
+	// is faster than acquiring it from pool
+	if c < cacheFrom {
+		return
+	}
+
+	segmentsPools[getTableIndex(c)].Put(s)
+}

+ 43 - 0
vendor/github.com/gobwas/glob/match/single.go

@@ -0,0 +1,43 @@
+package match
+
+import (
+	"fmt"
+	"github.com/gobwas/glob/util/runes"
+	"unicode/utf8"
+)
+
+// Single represents the `?` wildcard.
+type Single struct {
+	Separators []rune
+}
+
+func NewSingle(s []rune) Single {
+	return Single{s}
+}
+
+func (self Single) Match(s string) bool {
+	r, w := utf8.DecodeRuneInString(s)
+	if len(s) > w {
+		return false
+	}
+
+	return runes.IndexRune(self.Separators, r) == -1
+}
+
+func (self Single) Len() int {
+	return lenOne
+}
+
+func (self Single) Index(s string) (int, []int) {
+	for i, r := range s {
+		if runes.IndexRune(self.Separators, r) == -1 {
+			return i, segmentsByRuneLength[utf8.RuneLen(r)]
+		}
+	}
+
+	return -1, nil
+}
+
+func (self Single) String() string {
+	return fmt.Sprintf("<single:![%s]>", string(self.Separators))
+}

+ 35 - 0
vendor/github.com/gobwas/glob/match/suffix.go

@@ -0,0 +1,35 @@
+package match
+
+import (
+	"fmt"
+	"strings"
+)
+
+type Suffix struct {
+	Suffix string
+}
+
+func NewSuffix(s string) Suffix {
+	return Suffix{s}
+}
+
+func (self Suffix) Len() int {
+	return lenNo
+}
+
+func (self Suffix) Match(s string) bool {
+	return strings.HasSuffix(s, self.Suffix)
+}
+
+func (self Suffix) Index(s string) (int, []int) {
+	idx := strings.Index(s, self.Suffix)
+	if idx == -1 {
+		return -1, nil
+	}
+
+	return 0, []int{idx + len(self.Suffix)}
+}
+
+func (self Suffix) String() string {
+	return fmt.Sprintf("<suffix:%s>", self.Suffix)
+}

+ 43 - 0
vendor/github.com/gobwas/glob/match/suffix_any.go

@@ -0,0 +1,43 @@
+package match
+
+import (
+	"fmt"
+	"strings"
+
+	sutil "github.com/gobwas/glob/util/strings"
+)
+
+type SuffixAny struct {
+	Suffix     string
+	Separators []rune
+}
+
+func NewSuffixAny(s string, sep []rune) SuffixAny {
+	return SuffixAny{s, sep}
+}
+
+func (self SuffixAny) Index(s string) (int, []int) {
+	idx := strings.Index(s, self.Suffix)
+	if idx == -1 {
+		return -1, nil
+	}
+
+	i := sutil.LastIndexAnyRunes(s[:idx], self.Separators) + 1
+
+	return i, []int{idx + len(self.Suffix) - i}
+}
+
+func (self SuffixAny) Len() int {
+	return lenNo
+}
+
+func (self SuffixAny) Match(s string) bool {
+	if !strings.HasSuffix(s, self.Suffix) {
+		return false
+	}
+	return sutil.IndexAnyRunes(s[:len(s)-len(self.Suffix)], self.Separators) == -1
+}
+
+func (self SuffixAny) String() string {
+	return fmt.Sprintf("<suffix_any:![%s]%s>", string(self.Separators), self.Suffix)
+}

+ 33 - 0
vendor/github.com/gobwas/glob/match/super.go

@@ -0,0 +1,33 @@
+package match
+
+import (
+	"fmt"
+)
+
+type Super struct{}
+
+func NewSuper() Super {
+	return Super{}
+}
+
+func (self Super) Match(s string) bool {
+	return true
+}
+
+func (self Super) Len() int {
+	return lenNo
+}
+
+func (self Super) Index(s string) (int, []int) {
+	segments := acquireSegments(len(s) + 1)
+	for i := range s {
+		segments = append(segments, i)
+	}
+	segments = append(segments, len(s))
+
+	return 0, segments
+}
+
+func (self Super) String() string {
+	return fmt.Sprintf("<super>")
+}

+ 45 - 0
vendor/github.com/gobwas/glob/match/text.go

@@ -0,0 +1,45 @@
+package match
+
+import (
+	"fmt"
+	"strings"
+	"unicode/utf8"
+)
+
+// Text represents a raw string to match.
+type Text struct {
+	Str         string
+	RunesLength int
+	BytesLength int
+	Segments    []int
+}
+
+func NewText(s string) Text {
+	return Text{
+		Str:         s,
+		RunesLength: utf8.RuneCountInString(s),
+		BytesLength: len(s),
+		Segments:    []int{len(s)},
+	}
+}
+
+func (self Text) Match(s string) bool {
+	return self.Str == s
+}
+
+func (self Text) Len() int {
+	return self.RunesLength
+}
+
+func (self Text) Index(s string) (int, []int) {
+	index := strings.Index(s, self.Str)
+	if index == -1 {
+		return -1, nil
+	}
+
+	return index, self.Segments
+}
+
+func (self Text) String() string {
+	return fmt.Sprintf("<text:`%v`>", self.Str)
+}

+ 148 - 0
vendor/github.com/gobwas/glob/readme.md

@@ -0,0 +1,148 @@
+# glob.[go](https://golang.org)
+
+[![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url]
+
+> Go Globbing Library.
+
+## Install
+
+```shell
+    go get github.com/gobwas/glob
+```
+
+## Example
+
+```go
+
+package main
+
+import "github.com/gobwas/glob"
+
+func main() {
+    var g glob.Glob
+    
+    // create simple glob
+    g = glob.MustCompile("*.github.com")
+    g.Match("api.github.com") // true
+    
+    // quote meta characters and then create simple glob 
+    g = glob.MustCompile(glob.QuoteMeta("*.github.com"))
+    g.Match("*.github.com") // true
+    
+    // create new glob with set of delimiters as ["."]
+    g = glob.MustCompile("api.*.com", '.')
+    g.Match("api.github.com") // true
+    g.Match("api.gi.hub.com") // false
+    
+    // create new glob with set of delimiters as ["."]
+    // but now with super wildcard
+    g = glob.MustCompile("api.**.com", '.')
+    g.Match("api.github.com") // true
+    g.Match("api.gi.hub.com") // true
+        
+    // create glob with single symbol wildcard
+    g = glob.MustCompile("?at")
+    g.Match("cat") // true
+    g.Match("fat") // true
+    g.Match("at") // false
+    
+    // create glob with single symbol wildcard and delimiters ['f']
+    g = glob.MustCompile("?at", 'f')
+    g.Match("cat") // true
+    g.Match("fat") // false
+    g.Match("at") // false 
+    
+    // create glob with character-list matchers 
+    g = glob.MustCompile("[abc]at")
+    g.Match("cat") // true
+    g.Match("bat") // true
+    g.Match("fat") // false
+    g.Match("at") // false
+    
+    // create glob with character-list matchers 
+    g = glob.MustCompile("[!abc]at")
+    g.Match("cat") // false
+    g.Match("bat") // false
+    g.Match("fat") // true
+    g.Match("at") // false 
+    
+    // create glob with character-range matchers 
+    g = glob.MustCompile("[a-c]at")
+    g.Match("cat") // true
+    g.Match("bat") // true
+    g.Match("fat") // false
+    g.Match("at") // false
+    
+    // create glob with character-range matchers 
+    g = glob.MustCompile("[!a-c]at")
+    g.Match("cat") // false
+    g.Match("bat") // false
+    g.Match("fat") // true
+    g.Match("at") // false 
+    
+    // create glob with pattern-alternatives list 
+    g = glob.MustCompile("{cat,bat,[fr]at}")
+    g.Match("cat") // true
+    g.Match("bat") // true
+    g.Match("fat") // true
+    g.Match("rat") // true
+    g.Match("at") // false 
+    g.Match("zat") // false 
+}
+
+```
+
+## Performance
+
+This library is designed for compile-once patterns: compilation may take some time, but
+matching strings afterwards is faster than re-parsing the pattern on every call.
+
+If you do not reuse the compiled `glob.Glob` object and instead run `g := glob.MustCompile(pattern); g.Match(...)` every time, your code will be much slower.
+
+Run `go test -bench=.` from source root to see the benchmarks:
+
+Pattern | Fixture | Match | Speed (ns/op)
+--------|---------|-------|--------------
+`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432
+`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199
+`https://*.google.*` | `https://account.google.com` | `true` | 96
+`https://*.google.*` | `https://google.com` | `false` | 66
+`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163
+`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197
+`{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22
+`{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24
+`abc*` | `abcdef` | `true` | 8.15
+`abc*` | `af` | `false` | 5.68
+`*def` | `abcdef` | `true` | 8.84
+`*def` | `af` | `false` | 5.74
+`ab*ef` | `abcdef` | `true` | 15.2
+`ab*ef` | `af` | `false` | 10.4
+
+The same things with `regexp` package:
+
+Pattern | Fixture | Match | Speed (ns/op)
+--------|---------|-------|--------------
+`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553
+`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383
+`^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205
+`^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767
+`^(https:\/\/.*\.google\..*\|.*yandex\..*\|.*yahoo\..*\|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435
+`^(https:\/\/.*\.google\..*\|.*yandex\..*\|.*yahoo\..*\|.*mail\.ru)$` | `http://google.com` | `false` | 1674
+`^(https:\/\/.*gobwas\.com\|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039
+`^(https:\/\/.*gobwas\.com\|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272
+`^abc.*$` | `abcdef` | `true` | 237
+`^abc.*$` | `af` | `false` | 100
+`^.*def$` | `abcdef` | `true` | 464
+`^.*def$` | `af` | `false` | 265
+`^ab.*ef$` | `abcdef` | `true` | 375
+`^ab.*ef$` | `af` | `false` | 145
+
+[godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg
+[godoc-url]: https://godoc.org/github.com/gobwas/glob
+[travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master
+[travis-url]: https://travis-ci.org/gobwas/glob
+
+## Syntax
+
+The syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm),
+except that `**` (the super-asterisk) is not sensitive to separators.
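
To make the compile-once advice in the readme's Performance section concrete, here is a small sketch; the pattern and URLs are illustrative, not taken from the benchmarks above:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

// Compiled once and reused for every call; this is the intended usage.
var googleGlob = glob.MustCompile("https://*.google.*")

func countMatches(urls []string) int {
	n := 0
	for _, u := range urls {
		if googleGlob.Match(u) {
			n++
		}
	}
	return n
}

// countMatchesSlow recompiles the pattern on every iteration and pays the
// full parse/compile cost each time, which is what the readme warns against.
func countMatchesSlow(urls []string) int {
	n := 0
	for _, u := range urls {
		if glob.MustCompile("https://*.google.*").Match(u) {
			n++
		}
	}
	return n
}

func main() {
	urls := []string{"https://account.google.com", "https://example.com"}
	fmt.Println(countMatches(urls), countMatchesSlow(urls)) // 1 1
}
```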

+ 122 - 0
vendor/github.com/gobwas/glob/syntax/ast/ast.go

@@ -0,0 +1,122 @@
+package ast
+
+import (
+	"bytes"
+	"fmt"
+)
+
+type Node struct {
+	Parent   *Node
+	Children []*Node
+	Value    interface{}
+	Kind     Kind
+}
+
+func NewNode(k Kind, v interface{}, ch ...*Node) *Node {
+	n := &Node{
+		Kind:  k,
+		Value: v,
+	}
+	for _, c := range ch {
+		Insert(n, c)
+	}
+	return n
+}
+
+func (a *Node) Equal(b *Node) bool {
+	if a.Kind != b.Kind {
+		return false
+	}
+	if a.Value != b.Value {
+		return false
+	}
+	if len(a.Children) != len(b.Children) {
+		return false
+	}
+	for i, c := range a.Children {
+		if !c.Equal(b.Children[i]) {
+			return false
+		}
+	}
+	return true
+}
+
+func (a *Node) String() string {
+	var buf bytes.Buffer
+	buf.WriteString(a.Kind.String())
+	if a.Value != nil {
+		buf.WriteString(" =")
+		buf.WriteString(fmt.Sprintf("%v", a.Value))
+	}
+	if len(a.Children) > 0 {
+		buf.WriteString(" [")
+		for i, c := range a.Children {
+			if i > 0 {
+				buf.WriteString(", ")
+			}
+			buf.WriteString(c.String())
+		}
+		buf.WriteString("]")
+	}
+	return buf.String()
+}
+
+func Insert(parent *Node, children ...*Node) {
+	parent.Children = append(parent.Children, children...)
+	for _, ch := range children {
+		ch.Parent = parent
+	}
+}
+
+type List struct {
+	Not   bool
+	Chars string
+}
+
+type Range struct {
+	Not    bool
+	Lo, Hi rune
+}
+
+type Text struct {
+	Text string
+}
+
+type Kind int
+
+const (
+	KindNothing Kind = iota
+	KindPattern
+	KindList
+	KindRange
+	KindText
+	KindAny
+	KindSuper
+	KindSingle
+	KindAnyOf
+)
+
+func (k Kind) String() string {
+	switch k {
+	case KindNothing:
+		return "Nothing"
+	case KindPattern:
+		return "Pattern"
+	case KindList:
+		return "List"
+	case KindRange:
+		return "Range"
+	case KindText:
+		return "Text"
+	case KindAny:
+		return "Any"
+	case KindSuper:
+		return "Super"
+	case KindSingle:
+		return "Single"
+	case KindAnyOf:
+		return "AnyOf"
+	default:
+		return ""
+	}
+}

+ 157 - 0
vendor/github.com/gobwas/glob/syntax/ast/parser.go

@@ -0,0 +1,157 @@
+package ast
+
+import (
+	"errors"
+	"fmt"
+	"github.com/gobwas/glob/syntax/lexer"
+	"unicode/utf8"
+)
+
+type Lexer interface {
+	Next() lexer.Token
+}
+
+type parseFn func(*Node, Lexer) (parseFn, *Node, error)
+
+func Parse(lexer Lexer) (*Node, error) {
+	var parser parseFn
+
+	root := NewNode(KindPattern, nil)
+
+	var (
+		tree *Node
+		err  error
+	)
+	for parser, tree = parserMain, root; parser != nil; {
+		parser, tree, err = parser(tree, lexer)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	return root, nil
+}
+
+func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) {
+	for {
+		token := lex.Next()
+		switch token.Type {
+		case lexer.EOF:
+			return nil, tree, nil
+
+		case lexer.Error:
+			return nil, tree, errors.New(token.Raw)
+
+		case lexer.Text:
+			Insert(tree, NewNode(KindText, Text{token.Raw}))
+			return parserMain, tree, nil
+
+		case lexer.Any:
+			Insert(tree, NewNode(KindAny, nil))
+			return parserMain, tree, nil
+
+		case lexer.Super:
+			Insert(tree, NewNode(KindSuper, nil))
+			return parserMain, tree, nil
+
+		case lexer.Single:
+			Insert(tree, NewNode(KindSingle, nil))
+			return parserMain, tree, nil
+
+		case lexer.RangeOpen:
+			return parserRange, tree, nil
+
+		case lexer.TermsOpen:
+			a := NewNode(KindAnyOf, nil)
+			Insert(tree, a)
+
+			p := NewNode(KindPattern, nil)
+			Insert(a, p)
+
+			return parserMain, p, nil
+
+		case lexer.Separator:
+			p := NewNode(KindPattern, nil)
+			Insert(tree.Parent, p)
+
+			return parserMain, p, nil
+
+		case lexer.TermsClose:
+			return parserMain, tree.Parent.Parent, nil
+
+		default:
+			return nil, tree, fmt.Errorf("unexpected token: %s", token)
+		}
+	}
+	return nil, tree, fmt.Errorf("unknown error")
+}
+
+func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) {
+	var (
+		not   bool
+		lo    rune
+		hi    rune
+		chars string
+	)
+	for {
+		token := lex.Next()
+		switch token.Type {
+		case lexer.EOF:
+			return nil, tree, errors.New("unexpected end")
+
+		case lexer.Error:
+			return nil, tree, errors.New(token.Raw)
+
+		case lexer.Not:
+			not = true
+
+		case lexer.RangeLo:
+			r, w := utf8.DecodeRuneInString(token.Raw)
+			if len(token.Raw) > w {
+				return nil, tree, fmt.Errorf("unexpected length of lo character")
+			}
+			lo = r
+
+		case lexer.RangeBetween:
+			//
+
+		case lexer.RangeHi:
+			r, w := utf8.DecodeRuneInString(token.Raw)
+			if len(token.Raw) > w {
+				return nil, tree, fmt.Errorf("unexpected length of hi character")
+			}
+
+			hi = r
+
+			if hi < lo {
+				return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo))
+			}
+
+		case lexer.Text:
+			chars = token.Raw
+
+		case lexer.RangeClose:
+			isRange := lo != 0 && hi != 0
+			isChars := chars != ""
+
+			if isChars == isRange {
+				return nil, tree, fmt.Errorf("could not parse range")
+			}
+
+			if isRange {
+				Insert(tree, NewNode(KindRange, Range{
+					Lo:  lo,
+					Hi:  hi,
+					Not: not,
+				}))
+			} else {
+				Insert(tree, NewNode(KindList, List{
+					Chars: chars,
+					Not:   not,
+				}))
+			}
+
+			return parserMain, tree, nil
+		}
+	}
+}

+ 273 - 0
vendor/github.com/gobwas/glob/syntax/lexer/lexer.go

@@ -0,0 +1,273 @@
+package lexer
+
+import (
+	"bytes"
+	"fmt"
+	"github.com/gobwas/glob/util/runes"
+	"unicode/utf8"
+)
+
+const (
+	char_any           = '*'
+	char_comma         = ','
+	char_single        = '?'
+	char_escape        = '\\'
+	char_range_open    = '['
+	char_range_close   = ']'
+	char_terms_open    = '{'
+	char_terms_close   = '}'
+	char_range_not     = '!'
+	char_range_between = '-'
+)
+
+var specials = []byte{
+	char_any,
+	char_single,
+	char_escape,
+	char_range_open,
+	char_range_close,
+	char_terms_open,
+	char_terms_close,
+}
+
+func Special(c byte) bool {
+	return bytes.IndexByte(specials, c) != -1
+}
+
+type tokens []Token
+
+func (i *tokens) shift() (ret Token) {
+	ret = (*i)[0]
+	copy(*i, (*i)[1:])
+	*i = (*i)[:len(*i)-1]
+	return
+}
+
+func (i *tokens) push(v Token) {
+	*i = append(*i, v)
+}
+
+func (i *tokens) empty() bool {
+	return len(*i) == 0
+}
+
+var eof rune = 0
+
+type lexer struct {
+	data string
+	pos  int
+	err  error
+
+	tokens     tokens
+	termsLevel int
+
+	lastRune     rune
+	lastRuneSize int
+	hasRune      bool
+}
+
+func NewLexer(source string) *lexer {
+	l := &lexer{
+		data:   source,
+		tokens: tokens(make([]Token, 0, 4)),
+	}
+	return l
+}
+
+func (l *lexer) Next() Token {
+	if l.err != nil {
+		return Token{Error, l.err.Error()}
+	}
+	if !l.tokens.empty() {
+		return l.tokens.shift()
+	}
+
+	l.fetchItem()
+	return l.Next()
+}
+
+func (l *lexer) peek() (r rune, w int) {
+	if l.pos == len(l.data) {
+		return eof, 0
+	}
+
+	r, w = utf8.DecodeRuneInString(l.data[l.pos:])
+	if r == utf8.RuneError {
+		l.errorf("could not read rune")
+		r = eof
+		w = 0
+	}
+
+	return
+}
+
+func (l *lexer) read() rune {
+	if l.hasRune {
+		l.hasRune = false
+		l.seek(l.lastRuneSize)
+		return l.lastRune
+	}
+
+	r, s := l.peek()
+	l.seek(s)
+
+	l.lastRune = r
+	l.lastRuneSize = s
+
+	return r
+}
+
+func (l *lexer) seek(w int) {
+	l.pos += w
+}
+
+func (l *lexer) unread() {
+	if l.hasRune {
+		l.errorf("could not unread rune")
+		return
+	}
+	l.seek(-l.lastRuneSize)
+	l.hasRune = true
+}
+
+func (l *lexer) errorf(f string, v ...interface{}) {
+	l.err = fmt.Errorf(f, v...)
+}
+
+func (l *lexer) inTerms() bool {
+	return l.termsLevel > 0
+}
+
+func (l *lexer) termsEnter() {
+	l.termsLevel++
+}
+
+func (l *lexer) termsLeave() {
+	l.termsLevel--
+}
+
+var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open}
+var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma)
+
+func (l *lexer) fetchItem() {
+	r := l.read()
+	switch {
+	case r == eof:
+		l.tokens.push(Token{EOF, ""})
+
+	case r == char_terms_open:
+		l.termsEnter()
+		l.tokens.push(Token{TermsOpen, string(r)})
+
+	case r == char_comma && l.inTerms():
+		l.tokens.push(Token{Separator, string(r)})
+
+	case r == char_terms_close && l.inTerms():
+		l.tokens.push(Token{TermsClose, string(r)})
+		l.termsLeave()
+
+	case r == char_range_open:
+		l.tokens.push(Token{RangeOpen, string(r)})
+		l.fetchRange()
+
+	case r == char_single:
+		l.tokens.push(Token{Single, string(r)})
+
+	case r == char_any:
+		if l.read() == char_any {
+			l.tokens.push(Token{Super, string(r) + string(r)})
+		} else {
+			l.unread()
+			l.tokens.push(Token{Any, string(r)})
+		}
+
+	default:
+		l.unread()
+
+		var breakers []rune
+		if l.inTerms() {
+			breakers = inTermsBreakers
+		} else {
+			breakers = inTextBreakers
+		}
+		l.fetchText(breakers)
+	}
+}
+
+func (l *lexer) fetchRange() {
+	var wantHi bool
+	var wantClose bool
+	var seenNot bool
+	for {
+		r := l.read()
+		if r == eof {
+			l.errorf("unexpected end of input")
+			return
+		}
+
+		if wantClose {
+			if r != char_range_close {
+				l.errorf("expected close range character")
+			} else {
+				l.tokens.push(Token{RangeClose, string(r)})
+			}
+			return
+		}
+
+		if wantHi {
+			l.tokens.push(Token{RangeHi, string(r)})
+			wantClose = true
+			continue
+		}
+
+		if !seenNot && r == char_range_not {
+			l.tokens.push(Token{Not, string(r)})
+			seenNot = true
+			continue
+		}
+
+		if n, w := l.peek(); n == char_range_between {
+			l.seek(w)
+			l.tokens.push(Token{RangeLo, string(r)})
+			l.tokens.push(Token{RangeBetween, string(n)})
+			wantHi = true
+			continue
+		}
+
+		l.unread() // unread first peek and fetch as text
+		l.fetchText([]rune{char_range_close})
+		wantClose = true
+	}
+}
+
+func (l *lexer) fetchText(breakers []rune) {
+	var data []rune
+	var escaped bool
+
+reading:
+	for {
+		r := l.read()
+		if r == eof {
+			break
+		}
+
+		if !escaped {
+			if r == char_escape {
+				escaped = true
+				continue
+			}
+
+			if runes.IndexRune(breakers, r) != -1 {
+				l.unread()
+				break reading
+			}
+		}
+
+		escaped = false
+		data = append(data, r)
+	}
+
+	if len(data) > 0 {
+		l.tokens.push(Token{Text, string(data)})
+	}
+}
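As a quick, editorial illustration of the token stream this lexer produces (not part of the upstream file), the loop below drains NewLexer for a small pattern; Next keeps handing out buffered tokens until EOF or Error.

package main

import (
	"fmt"

	"github.com/gobwas/glob/syntax/lexer"
)

func main() {
	l := lexer.NewLexer("{*.go,src/**}")
	for {
		tok := l.Next()
		// Token.String renders as type<"raw">, e.g. terms_open<"{"> or super<"**">.
		fmt.Println(tok)
		if tok.Type == lexer.EOF || tok.Type == lexer.Error {
			break
		}
	}
}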

+ 88 - 0
vendor/github.com/gobwas/glob/syntax/lexer/token.go

@@ -0,0 +1,88 @@
+package lexer
+
+import "fmt"
+
+type TokenType int
+
+const (
+	EOF TokenType = iota
+	Error
+	Text
+	Char
+	Any
+	Super
+	Single
+	Not
+	Separator
+	RangeOpen
+	RangeClose
+	RangeLo
+	RangeHi
+	RangeBetween
+	TermsOpen
+	TermsClose
+)
+
+func (tt TokenType) String() string {
+	switch tt {
+	case EOF:
+		return "eof"
+
+	case Error:
+		return "error"
+
+	case Text:
+		return "text"
+
+	case Char:
+		return "char"
+
+	case Any:
+		return "any"
+
+	case Super:
+		return "super"
+
+	case Single:
+		return "single"
+
+	case Not:
+		return "not"
+
+	case Separator:
+		return "separator"
+
+	case RangeOpen:
+		return "range_open"
+
+	case RangeClose:
+		return "range_close"
+
+	case RangeLo:
+		return "range_lo"
+
+	case RangeHi:
+		return "range_hi"
+
+	case RangeBetween:
+		return "range_between"
+
+	case TermsOpen:
+		return "terms_open"
+
+	case TermsClose:
+		return "terms_close"
+
+	default:
+		return "undef"
+	}
+}
+
+type Token struct {
+	Type TokenType
+	Raw  string
+}
+
+func (t Token) String() string {
+	return fmt.Sprintf("%v<%q>", t.Type, t.Raw)
+}

+ 14 - 0
vendor/github.com/gobwas/glob/syntax/syntax.go

@@ -0,0 +1,14 @@
+package syntax
+
+import (
+	"github.com/gobwas/glob/syntax/ast"
+	"github.com/gobwas/glob/syntax/lexer"
+)
+
+func Parse(s string) (*ast.Node, error) {
+	return ast.Parse(lexer.NewLexer(s))
+}
+
+func Special(b byte) bool {
+	return lexer.Special(b)
+}
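A minimal usage sketch of this two-function façade (editorial example; the Children field comes from ast.Node in ast.go):

package main

import (
	"fmt"

	"github.com/gobwas/glob/syntax"
)

func main() {
	// Parse lexes the pattern and hands the token stream to the AST parser in one call.
	node, err := syntax.Parse("a*[0-9].{png,jpg}")
	if err != nil {
		panic(err)
	}
	fmt.Printf("root node has %d children\n", len(node.Children))

	// Special reports whether a byte needs escaping in a pattern.
	fmt.Println(syntax.Special('*'), syntax.Special('a')) // true false
}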

+ 154 - 0
vendor/github.com/gobwas/glob/util/runes/runes.go

@@ -0,0 +1,154 @@
+package runes
+
+func Index(s, needle []rune) int {
+	ls, ln := len(s), len(needle)
+
+	switch {
+	case ln == 0:
+		return 0
+	case ln == 1:
+		return IndexRune(s, needle[0])
+	case ln == ls:
+		if Equal(s, needle) {
+			return 0
+		}
+		return -1
+	case ln > ls:
+		return -1
+	}
+
+head:
+	for i := 0; i < ls && ls-i >= ln; i++ {
+		for y := 0; y < ln; y++ {
+			if s[i+y] != needle[y] {
+				continue head
+			}
+		}
+
+		return i
+	}
+
+	return -1
+}
+
+func LastIndex(s, needle []rune) int {
+	ls, ln := len(s), len(needle)
+
+	switch {
+	case ln == 0:
+		if ls == 0 {
+			return 0
+		}
+		return ls
+	case ln == 1:
+		return IndexLastRune(s, needle[0])
+	case ln == ls:
+		if Equal(s, needle) {
+			return 0
+		}
+		return -1
+	case ln > ls:
+		return -1
+	}
+
+head:
+	// a match may start at index 0, so the lowest end position to try is ln-1
+	for i := ls - 1; i >= ln-1; i-- {
+		for y := ln - 1; y >= 0; y-- {
+			if s[i-(ln-y-1)] != needle[y] {
+				continue head
+			}
+		}
+
+		return i - ln + 1
+	}
+
+	return -1
+}
+
+// IndexAny returns the index of the first instance of any Unicode code point
+// from chars in s, or -1 if no Unicode code point from chars is present in s.
+func IndexAny(s, chars []rune) int {
+	if len(chars) > 0 {
+		for i, c := range s {
+			for _, m := range chars {
+				if c == m {
+					return i
+				}
+			}
+		}
+	}
+	return -1
+}
+
+func Contains(s, needle []rune) bool {
+	return Index(s, needle) >= 0
+}
+
+func Max(s []rune) (max rune) {
+	for _, r := range s {
+		if r > max {
+			max = r
+		}
+	}
+
+	return
+}
+
+func Min(s []rune) rune {
+	min := rune(-1)
+	for _, r := range s {
+		if min == -1 {
+			min = r
+			continue
+		}
+
+		if r < min {
+			min = r
+		}
+	}
+
+	return min
+}
+
+func IndexRune(s []rune, r rune) int {
+	for i, c := range s {
+		if c == r {
+			return i
+		}
+	}
+	return -1
+}
+
+func IndexLastRune(s []rune, r rune) int {
+	for i := len(s) - 1; i >= 0; i-- {
+		if s[i] == r {
+			return i
+		}
+	}
+
+	return -1
+}
+
+func Equal(a, b []rune) bool {
+	if len(a) == len(b) {
+		for i := 0; i < len(a); i++ {
+			if a[i] != b[i] {
+				return false
+			}
+		}
+
+		return true
+	}
+
+	return false
+}
+
+// HasPrefix tests whether the string s begins with prefix.
+func HasPrefix(s, prefix []rune) bool {
+	return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix)
+}
+
+// HasSuffix tests whether the string s ends with suffix.
+func HasSuffix(s, suffix []rune) bool {
+	return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix)
+}
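For reference, a short editorial sketch of these rune-slice helpers; note that the returned positions are rune indices, not byte offsets.

package main

import (
	"fmt"

	"github.com/gobwas/glob/util/runes"
)

func main() {
	s := []rune("göpher")
	fmt.Println(runes.Index(s, []rune("öp")))     // 1: rune index, not byte offset
	fmt.Println(runes.IndexRune(s, 'z'))          // -1: not present
	fmt.Println(runes.HasPrefix(s, []rune("gö"))) // true
	fmt.Println(runes.Max(s))                     // 246, the code point of 'ö'
}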

+ 39 - 0
vendor/github.com/gobwas/glob/util/strings/strings.go

@@ -0,0 +1,39 @@
+package strings
+
+import (
+	"strings"
+	"unicode/utf8"
+)
+
+func IndexAnyRunes(s string, rs []rune) int {
+	for _, r := range rs {
+		if i := strings.IndexRune(s, r); i != -1 {
+			return i
+		}
+	}
+
+	return -1
+}
+
+func LastIndexAnyRunes(s string, rs []rune) int {
+	for _, r := range rs {
+		i := -1
+		if 0 <= r && r < utf8.RuneSelf {
+			i = strings.LastIndexByte(s, byte(r))
+		} else {
+			// scan forward through the remaining tail of s, keeping the byte
+			// offset of the most recent occurrence so the last one wins
+			sub := s
+			for len(sub) > 0 {
+				j := strings.IndexRune(sub, r)
+				if j == -1 {
+					break
+				}
+				i = len(s) - len(sub) + j
+				sub = sub[j+utf8.RuneLen(r):]
+			}
+		}
+		if i != -1 {
+			return i
+		}
+	}
+	return -1
+}
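And a brief editorial sketch of the two helpers above; both return byte offsets into s and try the runes of rs in the order given. The import alias avoids shadowing the standard strings package.

package main

import (
	"fmt"

	gstrings "github.com/gobwas/glob/util/strings"
)

func main() {
	// '*' never occurs, so the first hit comes from '/' at byte offset 3.
	fmt.Println(gstrings.IndexAnyRunes("src/main.go", []rune{'*', '/'})) // 3
	// '.' is ASCII, so the fast strings.LastIndexByte path is used: byte offset 8.
	fmt.Println(gstrings.LastIndexAnyRunes("src/main.go", []rune{'.'})) // 8
}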

+ 0 - 27
vendor/gogs.carducci-dante.gov.it/karmen/util/template/template.go

@@ -1,8 +1,6 @@
 package template
 
 import (
-	"crypto/sha1"
-	"fmt"
 	"io/ioutil"
 	"text/template"
 )
@@ -29,28 +27,3 @@ func LoadTextTemplate(filename string, funcMap ...template.FuncMap) (*template.T
 	}
 	return tpl, nil
 }
-
-func LoadTextTemplateFromString(content string, funcMap ...template.FuncMap) (*template.Template, error) {
-	var (
-		tpl *template.Template
-		err error
-	)
-
-	h := sha1.New()
-	h.Write([]byte(content))
-
-	name := fmt.Sprintf("%x", h.Sum(nil))
-
-	if len(funcMap) > 0 {
-		tpl, err = template.New(name).Funcs(funcMap[0]).Parse(content)
-		if err != nil {
-			return nil, err
-		}
-	} else {
-		tpl, err = template.New(name).Parse(content)
-		if err != nil {
-			return nil, err
-		}
-	}
-	return tpl, nil
-}