detection of the bug

gobwas 2016-05-12 10:46:16 +03:00
parent 82e8d7da03
commit 1550dd349c
7 changed files with 364 additions and 213 deletions

View File

@ -490,8 +490,8 @@ func doAnyOf(n *nodeAnyOf, s []rune) (match.Matcher, error) {
}
func do(leaf node, s []rune) (m match.Matcher, err error) {
fmt.Println("node", reflect.Indirect(reflect.ValueOf(leaf)).Type().Name())
switch n := leaf.(type) {
case *nodeAnyOf:
// todo this could be faster on pattern_alternatives_combine_lite
if n := minimizeAnyOf(n.children()); n != nil {

View File

@ -2,6 +2,7 @@ package glob
import (
"github.com/gobwas/glob/match"
"github.com/gobwas/glob/match/debug"
"reflect"
"testing"
)
@ -206,209 +207,210 @@ func TestCompiler(t *testing.T) {
result Glob
sep []rune
}{
{
ast: pattern(&nodeText{text: "abc"}),
result: match.NewText("abc"),
},
{
ast: pattern(&nodeAny{}),
sep: separators,
result: match.NewAny(separators),
},
{
ast: pattern(&nodeAny{}),
result: match.NewSuper(),
},
{
ast: pattern(&nodeSuper{}),
result: match.NewSuper(),
},
{
ast: pattern(&nodeSingle{}),
sep: separators,
result: match.NewSingle(separators),
},
{
ast: pattern(&nodeRange{
lo: 'a',
hi: 'z',
not: true,
}),
result: match.NewRange('a', 'z', true),
},
{
ast: pattern(&nodeList{
chars: "abc",
not: true,
}),
result: match.NewList([]rune{'a', 'b', 'c'}, true),
},
{
ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
sep: separators,
result: match.EveryOf{Matchers: match.Matchers{
match.NewMin(3),
match.NewContains(string(separators), true),
}},
},
{
ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
result: match.NewMin(3),
},
{
ast: pattern(&nodeAny{}, &nodeText{text: "abc"}, &nodeSingle{}),
sep: separators,
result: match.NewBTree(
match.NewRow(
4,
match.Matchers{
match.NewText("abc"),
match.NewSingle(separators),
}...,
),
match.NewAny(separators),
nil,
),
},
{
ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSingle{}),
sep: separators,
result: match.NewBTree(
match.NewRow(
5,
match.Matchers{
match.NewSingle(separators),
match.NewText("abc"),
match.NewSingle(separators),
}...,
),
match.NewSuper(),
nil,
),
},
{
ast: pattern(&nodeAny{}, &nodeText{text: "abc"}),
result: match.NewSuffix("abc"),
},
{
ast: pattern(&nodeText{text: "abc"}, &nodeAny{}),
result: match.NewPrefix("abc"),
},
{
ast: pattern(&nodeText{text: "abc"}, &nodeAny{}, &nodeText{text: "def"}),
result: match.NewPrefixSuffix("abc", "def"),
},
{
ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
result: match.NewContains("abc", false),
},
{
ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
sep: separators,
result: match.NewBTree(
match.NewText("abc"),
match.NewAny(separators),
match.NewAny(separators),
),
},
{
ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSuper{}, &nodeSingle{}),
result: match.NewBTree(
match.NewText("abc"),
match.NewMin(1),
match.NewMin(1),
),
},
{
ast: pattern(anyOf(&nodeText{text: "abc"})),
result: match.NewText("abc"),
},
{
ast: pattern(anyOf(pattern(anyOf(pattern(&nodeText{text: "abc"}))))),
result: match.NewText("abc"),
},
{
ast: pattern(anyOf(
pattern(
&nodeText{text: "abc"},
&nodeSingle{},
),
pattern(
&nodeText{text: "abc"},
&nodeList{chars: "def"},
),
pattern(
&nodeText{text: "abc"},
),
pattern(
&nodeText{text: "abc"},
),
)),
result: match.NewBTree(
match.NewText("abc"),
nil,
match.AnyOf{Matchers: match.Matchers{
match.NewSingle(nil),
match.NewList([]rune{'d', 'e', 'f'}, false),
match.NewNothing(),
}},
),
},
{
ast: pattern(
&nodeRange{lo: 'a', hi: 'z'},
&nodeRange{lo: 'a', hi: 'x', not: true},
&nodeAny{},
),
result: match.NewBTree(
match.NewRow(
2,
match.Matchers{
match.NewRange('a', 'z', false),
match.NewRange('a', 'x', true),
}...,
),
nil,
match.NewSuper(),
),
},
{
ast: pattern(anyOf(
pattern(
&nodeText{text: "abc"},
&nodeList{chars: "abc"},
&nodeText{text: "ghi"},
),
pattern(
&nodeText{text: "abc"},
&nodeList{chars: "def"},
&nodeText{text: "ghi"},
),
)),
result: match.NewRow(
7,
match.Matchers{
match.NewText("abc"),
match.AnyOf{Matchers: match.Matchers{
match.NewList([]rune{'a', 'b', 'c'}, false),
match.NewList([]rune{'d', 'e', 'f'}, false),
}},
match.NewText("ghi"),
}...,
),
},
// {
// ast: pattern(
// anyOf(&nodeText{text: "a"}, &nodeText{text: "b"}),
// anyOf(&nodeText{text: "c"}, &nodeText{text: "d"}),
// ),
// result: match.AnyOf{Matchers: match.Matchers{
// match.NewRow(Matchers: match.Matchers{match.Raw{"a"}, match.Raw{"c", 1}}),
// match.NewRow(Matchers: match.Matchers{match.Raw{"a"}, match.Raw{"d"}}),
// match.NewRow(Matchers: match.Matchers{match.Raw{"b"}, match.Raw{"c", 1}}),
// match.NewRow(Matchers: match.Matchers{match.Raw{"b"}, match.Raw{"d"}}),
// }},
// },
//{
// ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
// result: match.NewMin(3),
//},
//{
// ast: pattern(&nodeAny{}, &nodeText{text: "abc"}, &nodeSingle{}),
// sep: separators,
// result: match.NewBTree(
// match.NewRow(
// 4,
// match.Matchers{
// match.NewText("abc"),
// match.NewSingle(separators),
// }...,
// ),
// match.NewAny(separators),
// nil,
// ),
//},
{
ast: pattern(&nodeText{text: "/"}, anyOf(&nodeText{text: "z"}, &nodeText{text: "ab"}), &nodeSuper{}),
sep: separators,
result: match.NewBTree(
match.NewText("/"),
nil,
match.NewBTree(
match.NewAnyOf(match.NewText("z"), match.NewText("ab")),
nil,
match.NewSuper(),
),
),
},
//{
// ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSingle{}),
// sep: separators,
// result: match.NewBTree(
// match.NewRow(
// 5,
// match.Matchers{
// match.NewSingle(separators),
// match.NewText("abc"),
// match.NewSingle(separators),
// }...,
// ),
// match.NewSuper(),
// nil,
// ),
//},
//{
// ast: pattern(&nodeAny{}, &nodeText{text: "abc"}),
// result: match.NewSuffix("abc"),
//},
//{
// ast: pattern(&nodeText{text: "abc"}, &nodeAny{}),
// result: match.NewPrefix("abc"),
//},
//{
// ast: pattern(&nodeText{text: "abc"}, &nodeAny{}, &nodeText{text: "def"}),
// result: match.NewPrefixSuffix("abc", "def"),
//},
//{
// ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
// result: match.NewContains("abc", false),
//},
//{
// ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
// sep: separators,
// result: match.NewBTree(
// match.NewText("abc"),
// match.NewAny(separators),
// match.NewAny(separators),
// ),
//},
//{
// ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSuper{}, &nodeSingle{}),
// result: match.NewBTree(
// match.NewText("abc"),
// match.NewMin(1),
// match.NewMin(1),
// ),
//},
//{
// ast: pattern(anyOf(&nodeText{text: "abc"})),
// result: match.NewText("abc"),
//},
//{
// ast: pattern(anyOf(pattern(anyOf(pattern(&nodeText{text: "abc"}))))),
// result: match.NewText("abc"),
//},
//{
// ast: pattern(anyOf(
// pattern(
// &nodeText{text: "abc"},
// &nodeSingle{},
// ),
// pattern(
// &nodeText{text: "abc"},
// &nodeList{chars: "def"},
// ),
// pattern(
// &nodeText{text: "abc"},
// ),
// pattern(
// &nodeText{text: "abc"},
// ),
// )),
// result: match.NewBTree(
// match.NewText("abc"),
// nil,
// match.AnyOf{Matchers: match.Matchers{
// match.NewSingle(nil),
// match.NewList([]rune{'d', 'e', 'f'}, false),
// match.NewNothing(),
// }},
// ),
//},
//{
// ast: pattern(
// &nodeRange{lo: 'a', hi: 'z'},
// &nodeRange{lo: 'a', hi: 'x', not: true},
// &nodeAny{},
// ),
// result: match.NewBTree(
// match.NewRow(
// 2,
// match.Matchers{
// match.NewRange('a', 'z', false),
// match.NewRange('a', 'x', true),
// }...,
// ),
// nil,
// match.NewSuper(),
// ),
//},
//{
// ast: pattern(anyOf(
// pattern(
// &nodeText{text: "abc"},
// &nodeList{chars: "abc"},
// &nodeText{text: "ghi"},
// ),
// pattern(
// &nodeText{text: "abc"},
// &nodeList{chars: "def"},
// &nodeText{text: "ghi"},
// ),
// )),
// result: match.NewRow(
// 7,
// match.Matchers{
// match.NewText("abc"),
// match.AnyOf{Matchers: match.Matchers{
// match.NewList([]rune{'a', 'b', 'c'}, false),
// match.NewList([]rune{'d', 'e', 'f'}, false),
// }},
// match.NewText("ghi"),
// }...,
// ),
//},
} {
m, err := compile(test.ast, test.sep)
if err != nil {
@ -417,7 +419,7 @@ func TestCompiler(t *testing.T) {
}
if !reflect.DeepEqual(m, test.result) {
t.Errorf("#%d results are not equal:\nexp: %#v\nact: %#v\nexp:\n%s\nact:\n%s\n", id, test.result, m, debug.Graphviz("", test.result.(match.Matcher)), debug.Graphviz("", m.(match.Matcher)))
continue
}
}

View File

@ -108,6 +108,10 @@ func TestGlob(t *testing.T) {
glob(true, "{*,**}{a,b}", "ab"),
glob(false, "{*,**}{a,b}", "ac"),
glob(true, "/{rate,[a-z][a-z][a-z]}*", "/rate"),
glob(true, "/{rate,[0-9][0-9][0-9]}*", "/rate"),
glob(true, "/{rate,[a-z][a-z][a-z]}*", "/usd"),
glob(true, pattern_all, fixture_all_match),
glob(false, pattern_all, fixture_all_mismatch),

View File

@ -123,6 +123,20 @@ func (i item) String() string {
return fmt.Sprintf("%v<%s>", i.t, i.s)
}
type stubLexer struct {
Items []item
pos int
}
func (s *stubLexer) nextItem() (ret item) {
if s.pos == len(s.Items) {
return item{item_eof, ""}
}
ret = s.Items[s.pos]
s.pos++
return
}
type lexer struct {
input string
start int

View File

@ -16,6 +16,14 @@ func TestLexGood(t *testing.T) {
item{item_eof, ""},
},
},
//{
// TODO(gobwas): this is freezing on globtest/globdraw (`]]` syntax error)
//pattern: "/{rate,[0-9]]}*",
//items: []item{
// item{item_text, "hello"},
// item{item_eof, ""},
//},
//},
{
pattern: "hello,world",
items: []item{
@ -114,6 +122,19 @@ func TestLexGood(t *testing.T) {
item{item_eof, ""},
},
},
{
pattern: "/{z,ab}*",
items: []item{
item{item_text, "/"},
item{item_terms_open, "{"},
item{item_text, "z"},
item{item_separator, ","},
item{item_text, "ab"},
item{item_terms_close, "}"},
item{item_any, "*"},
item{item_eof, ""},
},
},
{
pattern: "{[!日-語],*,?,{a,b,\\c}}",
items: []item{

View File

@ -11,6 +11,11 @@ type node interface {
append(node)
}
// todo may be split it into another package
type lexerIface interface {
nextItem() item
}
type nodeImpl struct {
desc []node
}
@ -72,9 +77,9 @@ func (t *tree) leave() {
t.current = t.path[len(t.path)-1]
}
type parseFn func(*tree, lexerIface) (parseFn, error)
func parse(lexer lexerIface) (*nodePattern, error) {
var parser parseFn
root := &nodePattern{}
@ -97,7 +102,7 @@ func parse(lexer *lexer) (*nodePattern, error) {
return root, nil
}
func parserMain(tree *tree, lexer lexerIface) (parseFn, error) {
for stop := false; !stop; {
item := lexer.nextItem()
@ -151,7 +156,7 @@ func parserMain(tree *tree, lexer *lexer) (parseFn, error) {
return nil, nil
}
func parserRange(tree *tree, lexer lexerIface) (parseFn, error) {
var (
not bool
lo rune

View File

@ -8,11 +8,15 @@ import (
func TestParseString(t *testing.T) {
for id, test := range []struct {
items []item
tree node
}{
{
//pattern: "abc",
items: []item{
item{item_text, "abc"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
@ -22,7 +26,13 @@ func TestParseString(t *testing.T) {
},
},
{
//pattern: "a*c",
items: []item{
item{item_text, "a"},
item{item_any, "*"},
item{item_text, "c"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
@ -34,7 +44,13 @@ func TestParseString(t *testing.T) {
},
},
{
//pattern: "a**c",
items: []item{
item{item_text, "a"},
item{item_super, "**"},
item{item_text, "c"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
@ -46,7 +62,13 @@ func TestParseString(t *testing.T) {
},
},
{
//pattern: "a?c",
items: []item{
item{item_text, "a"},
item{item_single, "?"},
item{item_text, "c"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
@ -58,7 +80,16 @@ func TestParseString(t *testing.T) {
},
},
{
//pattern: "[!a-z]",
items: []item{
item{item_range_open, "["},
item{item_not, "!"},
item{item_range_lo, "a"},
item{item_range_between, "-"},
item{item_range_hi, "z"},
item{item_range_close, "]"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
@ -68,7 +99,13 @@ func TestParseString(t *testing.T) {
},
},
{
//pattern: "[az]",
items: []item{
item{item_range_open, "["},
item{item_text, "az"},
item{item_range_close, "]"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
@ -78,7 +115,15 @@ func TestParseString(t *testing.T) {
},
},
{
//pattern: "{a,z}",
items: []item{
item{item_terms_open, "{"},
item{item_text, "a"},
item{item_separator, ","},
item{item_text, "z"},
item{item_terms_close, "}"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
@ -99,7 +144,65 @@ func TestParseString(t *testing.T) {
},
},
{
pattern: "{a,{x,y},?,[a-z],[!qwe]}", //pattern: "/{z,ab}*",
items: []item{
item{item_text, "/"},
item{item_terms_open, "{"},
item{item_text, "z"},
item{item_separator, ","},
item{item_text, "ab"},
item{item_terms_close, "}"},
item{item_any, "*"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
&nodeText{text: "/"},
&nodeAnyOf{nodeImpl: nodeImpl{desc: []node{
&nodePattern{
nodeImpl: nodeImpl{desc: []node{
&nodeText{text: "z"},
}},
},
&nodePattern{
nodeImpl: nodeImpl{desc: []node{
&nodeText{text: "ab"},
}},
},
}}},
&nodeAny{},
},
},
},
},
{
//pattern: "{a,{x,y},?,[a-z],[!qwe]}",
items: []item{
item{item_terms_open, "{"},
item{item_text, "a"},
item{item_separator, ","},
item{item_terms_open, "{"},
item{item_text, "x"},
item{item_separator, ","},
item{item_text, "y"},
item{item_terms_close, "}"},
item{item_separator, ","},
item{item_single, "?"},
item{item_separator, ","},
item{item_range_open, "["},
item{item_range_lo, "a"},
item{item_range_between, "-"},
item{item_range_hi, "z"},
item{item_range_close, "]"},
item{item_separator, ","},
item{item_range_open, "["},
item{item_not, "!"},
item{item_text, "qwe"},
item{item_range_close, "]"},
item{item_terms_close, "}"},
item{item_eof, ""},
},
tree: &nodePattern{
nodeImpl: nodeImpl{
desc: []node{
@ -150,7 +253,9 @@ func TestParseString(t *testing.T) {
},
},
} {
lexer := &stubLexer{Items: test.items}
pattern, err := parse(lexer)
if err != nil {
t.Errorf("#%d %s", id, err)
continue
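
Aside: the parser-test refactor above reduces to parse() consuming a small lexer interface, so tests can drive the parser with a canned item stream instead of a real lexer. Below is a rough, self-contained Go sketch of that idea; the item constants and the parse body are simplified stand-ins for illustration, not the library's actual code.

package main

import "fmt"

type itemType int

const (
	item_eof itemType = iota
	item_text
)

type item struct {
	t itemType
	s string
}

// The parser only needs a source of items, not a concrete lexer.
type lexerIface interface {
	nextItem() item
}

// stubLexer replays a fixed slice of items and reports EOF once exhausted.
type stubLexer struct {
	Items []item
	pos   int
}

func (s *stubLexer) nextItem() (ret item) {
	if s.pos == len(s.Items) {
		return item{item_eof, ""}
	}
	ret = s.Items[s.pos]
	s.pos++
	return
}

// parse is a stand-in for the real parser: it just collects text items until EOF.
func parse(lex lexerIface) []string {
	var out []string
	for {
		it := lex.nextItem()
		if it.t == item_eof {
			return out
		}
		out = append(out, it.s)
	}
}

func main() {
	lex := &stubLexer{Items: []item{{item_text, "abc"}, {item_text, "def"}}}
	fmt.Println(parse(lex)) // prints [abc def]
}

Only nextItem() matters to the parser, which is why the stubLexer added in the lexer file can replace the real lexer in TestParseString.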