forked from mirror/glob

enable tests, fix lexer

parent eabde343bd
commit 994ba33cd9

compiler_test.go | 382
@@ -207,70 +207,70 @@ func TestCompiler(t *testing.T) {
result Glob
sep []rune
}{
//{
// ast: pattern(&nodeText{text: "abc"}),
// result: match.NewText("abc"),
//},
//{
// ast: pattern(&nodeAny{}),
// sep: separators,
// result: match.NewAny(separators),
//},
//{
// ast: pattern(&nodeAny{}),
// result: match.NewSuper(),
//},
//{
// ast: pattern(&nodeSuper{}),
// result: match.NewSuper(),
//},
//{
// ast: pattern(&nodeSingle{}),
// sep: separators,
// result: match.NewSingle(separators),
//},
//{
// ast: pattern(&nodeRange{
// lo: 'a',
// hi: 'z',
// not: true,
// }),
// result: match.NewRange('a', 'z', true),
//},
//{
// ast: pattern(&nodeList{
// chars: "abc",
// not: true,
// }),
// result: match.NewList([]rune{'a', 'b', 'c'}, true),
//},
//{
// ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
// sep: separators,
// result: match.EveryOf{Matchers: match.Matchers{
// match.NewMin(3),
// match.NewContains(string(separators), true),
// }},
//},
//{
// ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
// result: match.NewMin(3),
//},
//{
// ast: pattern(&nodeAny{}, &nodeText{text: "abc"}, &nodeSingle{}),
// sep: separators,
// result: match.NewBTree(
// match.NewRow(
// 4,
// match.Matchers{
// match.NewText("abc"),
// match.NewSingle(separators),
// }...,
// ),
// match.NewAny(separators),
// nil,
// ),
//},
{
ast: pattern(&nodeText{text: "abc"}),
result: match.NewText("abc"),
},
{
ast: pattern(&nodeAny{}),
sep: separators,
result: match.NewAny(separators),
},
{
ast: pattern(&nodeAny{}),
result: match.NewSuper(),
},
{
ast: pattern(&nodeSuper{}),
result: match.NewSuper(),
},
{
ast: pattern(&nodeSingle{}),
sep: separators,
result: match.NewSingle(separators),
},
{
ast: pattern(&nodeRange{
lo: 'a',
hi: 'z',
not: true,
}),
result: match.NewRange('a', 'z', true),
},
{
ast: pattern(&nodeList{
chars: "abc",
not: true,
}),
result: match.NewList([]rune{'a', 'b', 'c'}, true),
},
{
ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
sep: separators,
result: match.EveryOf{Matchers: match.Matchers{
match.NewMin(3),
match.NewContains(string(separators), true),
}},
},
{
ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
result: match.NewMin(3),
},
{
ast: pattern(&nodeAny{}, &nodeText{text: "abc"}, &nodeSingle{}),
sep: separators,
result: match.NewBTree(
match.NewRow(
4,
match.Matchers{
match.NewText("abc"),
match.NewSingle(separators),
}...,
),
match.NewAny(separators),
nil,
),
},
{
ast: pattern(&nodeText{text: "/"}, anyOf(&nodeText{text: "z"}, &nodeText{text: "ab"}), &nodeSuper{}),
sep: separators,
@@ -284,133 +284,133 @@ func TestCompiler(t *testing.T) {
),
),
},
//{
// ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSingle{}),
// sep: separators,
// result: match.NewBTree(
// match.NewRow(
// 5,
// match.Matchers{
// match.NewSingle(separators),
// match.NewText("abc"),
// match.NewSingle(separators),
// }...,
// ),
// match.NewSuper(),
// nil,
// ),
//},
//{
// ast: pattern(&nodeAny{}, &nodeText{text: "abc"}),
// result: match.NewSuffix("abc"),
//},
//{
// ast: pattern(&nodeText{text: "abc"}, &nodeAny{}),
// result: match.NewPrefix("abc"),
//},
//{
// ast: pattern(&nodeText{text: "abc"}, &nodeAny{}, &nodeText{text: "def"}),
// result: match.NewPrefixSuffix("abc", "def"),
//},
//{
// ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
// result: match.NewContains("abc", false),
//},
//{
// ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
// sep: separators,
// result: match.NewBTree(
// match.NewText("abc"),
// match.NewAny(separators),
// match.NewAny(separators),
// ),
//},
//{
// ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSuper{}, &nodeSingle{}),
// result: match.NewBTree(
// match.NewText("abc"),
// match.NewMin(1),
// match.NewMin(1),
// ),
//},
//{
// ast: pattern(anyOf(&nodeText{text: "abc"})),
// result: match.NewText("abc"),
//},
//{
// ast: pattern(anyOf(pattern(anyOf(pattern(&nodeText{text: "abc"}))))),
// result: match.NewText("abc"),
//},
//{
// ast: pattern(anyOf(
// pattern(
// &nodeText{text: "abc"},
// &nodeSingle{},
// ),
// pattern(
// &nodeText{text: "abc"},
// &nodeList{chars: "def"},
// ),
// pattern(
// &nodeText{text: "abc"},
// ),
// pattern(
// &nodeText{text: "abc"},
// ),
// )),
// result: match.NewBTree(
// match.NewText("abc"),
// nil,
// match.AnyOf{Matchers: match.Matchers{
// match.NewSingle(nil),
// match.NewList([]rune{'d', 'e', 'f'}, false),
// match.NewNothing(),
// }},
// ),
//},
//{
// ast: pattern(
// &nodeRange{lo: 'a', hi: 'z'},
// &nodeRange{lo: 'a', hi: 'x', not: true},
// &nodeAny{},
// ),
// result: match.NewBTree(
// match.NewRow(
// 2,
// match.Matchers{
// match.NewRange('a', 'z', false),
// match.NewRange('a', 'x', true),
// }...,
// ),
// nil,
// match.NewSuper(),
// ),
//},
//{
// ast: pattern(anyOf(
// pattern(
// &nodeText{text: "abc"},
// &nodeList{chars: "abc"},
// &nodeText{text: "ghi"},
// ),
// pattern(
// &nodeText{text: "abc"},
// &nodeList{chars: "def"},
// &nodeText{text: "ghi"},
// ),
// )),
// result: match.NewRow(
// 7,
// match.Matchers{
// match.NewText("abc"),
// match.AnyOf{Matchers: match.Matchers{
// match.NewList([]rune{'a', 'b', 'c'}, false),
// match.NewList([]rune{'d', 'e', 'f'}, false),
// }},
// match.NewText("ghi"),
// }...,
// ),
//},
{
ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSingle{}),
sep: separators,
result: match.NewBTree(
match.NewRow(
5,
match.Matchers{
match.NewSingle(separators),
match.NewText("abc"),
match.NewSingle(separators),
}...,
),
match.NewSuper(),
nil,
),
},
{
ast: pattern(&nodeAny{}, &nodeText{text: "abc"}),
result: match.NewSuffix("abc"),
},
{
ast: pattern(&nodeText{text: "abc"}, &nodeAny{}),
result: match.NewPrefix("abc"),
},
{
ast: pattern(&nodeText{text: "abc"}, &nodeAny{}, &nodeText{text: "def"}),
result: match.NewPrefixSuffix("abc", "def"),
},
{
ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
result: match.NewContains("abc", false),
},
{
ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
sep: separators,
result: match.NewBTree(
match.NewText("abc"),
match.NewAny(separators),
match.NewAny(separators),
),
},
{
ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSuper{}, &nodeSingle{}),
result: match.NewBTree(
match.NewText("abc"),
match.NewMin(1),
match.NewMin(1),
),
},
{
ast: pattern(anyOf(&nodeText{text: "abc"})),
result: match.NewText("abc"),
},
{
ast: pattern(anyOf(pattern(anyOf(pattern(&nodeText{text: "abc"}))))),
result: match.NewText("abc"),
},
{
ast: pattern(anyOf(
pattern(
&nodeText{text: "abc"},
&nodeSingle{},
),
pattern(
&nodeText{text: "abc"},
&nodeList{chars: "def"},
),
pattern(
&nodeText{text: "abc"},
),
pattern(
&nodeText{text: "abc"},
),
)),
result: match.NewBTree(
match.NewText("abc"),
nil,
match.AnyOf{Matchers: match.Matchers{
match.NewSingle(nil),
match.NewList([]rune{'d', 'e', 'f'}, false),
match.NewNothing(),
}},
),
},
{
ast: pattern(
&nodeRange{lo: 'a', hi: 'z'},
&nodeRange{lo: 'a', hi: 'x', not: true},
&nodeAny{},
),
result: match.NewBTree(
match.NewRow(
2,
match.Matchers{
match.NewRange('a', 'z', false),
match.NewRange('a', 'x', true),
}...,
),
nil,
match.NewSuper(),
),
},
{
ast: pattern(anyOf(
pattern(
&nodeText{text: "abc"},
&nodeList{chars: "abc"},
&nodeText{text: "ghi"},
),
pattern(
&nodeText{text: "abc"},
&nodeList{chars: "def"},
&nodeText{text: "ghi"},
),
)),
result: match.NewRow(
7,
match.Matchers{
match.NewText("abc"),
match.AnyOf{Matchers: match.Matchers{
match.NewList([]rune{'a', 'b', 'c'}, false),
match.NewList([]rune{'d', 'e', 'f'}, false),
}},
match.NewText("ghi"),
}...,
),
},
} {
m, err := compile(test.ast, test.sep)
if err != nil {
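The cases enabled above assert that the compiler collapses simple AST shapes into specialized matchers. Below is a rough, self-contained Go sketch of what those matchers amount to in plain string checks; it is an illustration only, not this package's internals.

    // Illustration only, not this package's internals: the specialized
    // matchers asserted above behave like plain string checks.
    package main

    import (
    	"fmt"
    	"strings"
    )

    func main() {
    	// pattern(&nodeText{text: "abc"}, &nodeAny{})           -> match.NewPrefix("abc")
    	fmt.Println(strings.HasPrefix("abc/def", "abc")) // true
    	// pattern(&nodeAny{}, &nodeText{text: "abc"})           -> match.NewSuffix("abc")
    	fmt.Println(strings.HasSuffix("def/abc", "abc")) // true
    	// pattern(&nodeAny{}, ..., &nodeText{text: "abc"}, ...) -> match.NewContains("abc", false)
    	fmt.Println(strings.Contains("de-abc-fg", "abc")) // true
    	// pattern(&nodeAny{}) with no separators                -> match.NewSuper(), which accepts any string
    }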
@@ -166,7 +166,11 @@ func TestQuoteMeta(t *testing.T) {
}{
{
in: `[foo*]`,
out: `\[foo\*\]`,
out: `\[foo\*]`,
},
{
in: `{foo*}`,
out: `\{foo\*\}`,
},
{
in: string(specials),
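A QuoteMeta-style helper escapes only the bytes listed as special, so the expected output changes whenever the specials table changes (in the updated case above, `]` is no longer escaped). The sketch below is a self-contained illustration with a hypothetical specials list; it is not the library's implementation.

    // Illustrative sketch with a hypothetical specials table, not the
    // library's actual code: escape every byte found in the table.
    package main

    import "fmt"

    var specials = []byte{'\\', '*', '?', '[', '{', '}'} // hypothetical table: ']' intentionally absent

    func special(b byte) bool {
    	for _, s := range specials {
    		if b == s {
    			return true
    		}
    	}
    	return false
    }

    func quoteMeta(s string) string {
    	b := make([]byte, 0, 2*len(s))
    	for i := 0; i < len(s); i++ {
    		if special(s[i]) {
    			b = append(b, '\\')
    		}
    		b = append(b, s[i])
    	}
    	return string(b)
    }

    func main() {
    	fmt.Println(quoteMeta("[foo*]")) // \[foo\*]  (']' is not in the table)
    	fmt.Println(quoteMeta("{foo*}")) // \{foo\*\}
    }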
lexer.go | 22

@@ -25,7 +25,6 @@ var specials = []byte{
char_single,
char_escape,
char_range_open,
char_range_close,
char_terms_open,
char_terms_close,
}
@@ -283,20 +282,20 @@ func lexRaw(l *lexer) stateFn {
return lexTermsOpen

case char_terms_close:
l.unread()
return lexTermsClose
if l.inTerms() { // if we are in terms
l.unread()
return lexTermsClose
}

case char_comma:
if l.inTerms() { // if we are not in terms
if l.inTerms() { // if we are in terms
l.unread()
return lexSeparator
}
fallthrough

default:
l.unread()
return lexText
}

l.unread()
return lexText
}

if l.pos > l.start {
@@ -339,7 +338,10 @@ scan:
escaped = false
}

l.emit(item_text, string(data))
if len(data) > 0 {
l.emit(item_text, string(data))
}

return lexRaw
}
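Taken together, the lexer changes above make `}` and `,` significant only while inside a `{...}` group, and avoid emitting empty text items. A rough, self-contained illustration of that behaviour follows; the code is hypothetical and not the package's lexer.

    // Hypothetical illustration, not the package's lexer: '}' and ','
    // only act as terminators/separators while inside a {...} group;
    // elsewhere they fall through to the text path.
    package main

    import "fmt"

    func tokens(pattern string) []string {
    	var out []string
    	depth := 0 // rough stand-in for l.inTerms()
    	text := ""
    	flush := func() {
    		if text != "" { // mirrors the len(data) > 0 guard added above
    			out = append(out, "text:"+text)
    			text = ""
    		}
    	}
    	for _, c := range pattern {
    		switch {
    		case c == '{':
    			flush()
    			depth++
    			out = append(out, "terms_open")
    		case c == '}' && depth > 0:
    			flush()
    			depth--
    			out = append(out, "terms_close")
    		case c == ',' && depth > 0:
    			flush()
    			out = append(out, "separator")
    		default:
    			text += string(c)
    		}
    	}
    	flush()
    	return out
    }

    func main() {
    	fmt.Println(tokens("{a,b}]")) // ']' outside a group stays plain text
    	fmt.Println(tokens("a,b"))    // ',' outside a group stays plain text
    }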
@@ -16,14 +16,24 @@ func TestLexGood(t *testing.T) {
item{item_eof, ""},
},
},
//{
// TODO(gobwas): this is freezing on globtest/globdraw (`]]` syntax error)
//pattern: "/{rate,[0-9]]}*",
//items: []item{
// item{item_text, "hello"},
// item{item_eof, ""},
//},
//},
{
pattern: "/{rate,[0-9]]}*",
items: []item{
item{item_text, "/"},
item{item_terms_open, "{"},
item{item_text, "rate"},
item{item_separator, ","},
item{item_range_open, "["},
item{item_range_lo, "0"},
item{item_range_between, "-"},
item{item_range_hi, "9"},
item{item_range_close, "]"},
item{item_text, "]"},
item{item_terms_close, "}"},
item{item_any, "*"},
item{item_eof, ""},
},
},
{
pattern: "hello,world",
items: []item{