package ast

import (
	"github.com/gobwas/glob/syntax/lexer"
	"reflect"
	"testing"
)

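// stubLexer feeds a predefined sequence of tokens to the parser,
// returning an EOF token once the sequence is exhausted.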
type stubLexer struct {
	tokens []lexer.Token
	pos    int
}

func (s *stubLexer) Next() (ret lexer.Token) {
	if s.pos == len(s.tokens) {
		return lexer.Token{lexer.EOF, ""}
	}
	ret = s.tokens[s.pos]
	s.pos++
	return
}

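// TestParseString is table-driven: each case feeds a hand-built token
// stream (the glob pattern noted in the comment, already lexed) through
// Parse and compares the resulting AST against the expected tree.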
func TestParseString(t *testing.T) {
	for id, test := range []struct {
		tokens []lexer.Token
		tree   *Node
	}{
		{
			// pattern: "abc"
			tokens: []lexer.Token{
				lexer.Token{lexer.Text, "abc"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "abc"}),
			),
		},
		{
			// pattern: "a*c"
			tokens: []lexer.Token{
				lexer.Token{lexer.Text, "a"},
				lexer.Token{lexer.Any, "*"},
				lexer.Token{lexer.Text, "c"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "a"}),
				NewNode(KindAny, nil),
				NewNode(KindText, Text{Text: "c"}),
			),
		},
		{
			// pattern: "a**c"
			tokens: []lexer.Token{
				lexer.Token{lexer.Text, "a"},
				lexer.Token{lexer.Super, "**"},
				lexer.Token{lexer.Text, "c"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "a"}),
				NewNode(KindSuper, nil),
				NewNode(KindText, Text{Text: "c"}),
			),
		},
		{
			// pattern: "a?c"
			tokens: []lexer.Token{
				lexer.Token{lexer.Text, "a"},
				lexer.Token{lexer.Single, "?"},
				lexer.Token{lexer.Text, "c"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "a"}),
				NewNode(KindSingle, nil),
				NewNode(KindText, Text{Text: "c"}),
			),
		},
		{
			// pattern: "[!a-z]"
			tokens: []lexer.Token{
				lexer.Token{lexer.RangeOpen, "["},
				lexer.Token{lexer.Not, "!"},
				lexer.Token{lexer.RangeLo, "a"},
				lexer.Token{lexer.RangeBetween, "-"},
				lexer.Token{lexer.RangeHi, "z"},
				lexer.Token{lexer.RangeClose, "]"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindRange, Range{Lo: 'a', Hi: 'z', Not: true}),
			),
		},
		{
			// pattern: "[az]"
			tokens: []lexer.Token{
				lexer.Token{lexer.RangeOpen, "["},
				lexer.Token{lexer.Text, "az"},
				lexer.Token{lexer.RangeClose, "]"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindList, List{Chars: "az"}),
			),
		},
		{
			// pattern: "{a,z}"
			tokens: []lexer.Token{
				lexer.Token{lexer.TermsOpen, "{"},
				lexer.Token{lexer.Text, "a"},
				lexer.Token{lexer.Separator, ","},
				lexer.Token{lexer.Text, "z"},
				lexer.Token{lexer.TermsClose, "}"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindAnyOf, nil,
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "a"}),
					),
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "z"}),
					),
				),
			),
		},
		{
			// pattern: "/{z,ab}*"
			tokens: []lexer.Token{
				lexer.Token{lexer.Text, "/"},
				lexer.Token{lexer.TermsOpen, "{"},
				lexer.Token{lexer.Text, "z"},
				lexer.Token{lexer.Separator, ","},
				lexer.Token{lexer.Text, "ab"},
				lexer.Token{lexer.TermsClose, "}"},
				lexer.Token{lexer.Any, "*"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "/"}),
				NewNode(KindAnyOf, nil,
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "z"}),
					),
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "ab"}),
					),
				),
				NewNode(KindAny, nil),
			),
		},
		{
			// pattern: "{a,{x,y},?,[a-z],[!qwe]}"
			tokens: []lexer.Token{
				lexer.Token{lexer.TermsOpen, "{"},
				lexer.Token{lexer.Text, "a"},
				lexer.Token{lexer.Separator, ","},
				lexer.Token{lexer.TermsOpen, "{"},
				lexer.Token{lexer.Text, "x"},
				lexer.Token{lexer.Separator, ","},
				lexer.Token{lexer.Text, "y"},
				lexer.Token{lexer.TermsClose, "}"},
				lexer.Token{lexer.Separator, ","},
				lexer.Token{lexer.Single, "?"},
				lexer.Token{lexer.Separator, ","},
				lexer.Token{lexer.RangeOpen, "["},
				lexer.Token{lexer.RangeLo, "a"},
				lexer.Token{lexer.RangeBetween, "-"},
				lexer.Token{lexer.RangeHi, "z"},
				lexer.Token{lexer.RangeClose, "]"},
				lexer.Token{lexer.Separator, ","},
				lexer.Token{lexer.RangeOpen, "["},
				lexer.Token{lexer.Not, "!"},
				lexer.Token{lexer.Text, "qwe"},
				lexer.Token{lexer.RangeClose, "]"},
				lexer.Token{lexer.TermsClose, "}"},
				lexer.Token{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindAnyOf, nil,
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "a"}),
					),
					NewNode(KindPattern, nil,
						NewNode(KindAnyOf, nil,
							NewNode(KindPattern, nil,
								NewNode(KindText, Text{Text: "x"}),
							),
							NewNode(KindPattern, nil,
								NewNode(KindText, Text{Text: "y"}),
							),
						),
					),
					NewNode(KindPattern, nil,
						NewNode(KindSingle, nil),
					),
					NewNode(KindPattern, nil,
						NewNode(KindRange, Range{Lo: 'a', Hi: 'z', Not: false}),
					),
					NewNode(KindPattern, nil,
						NewNode(KindList, List{Chars: "qwe", Not: true}),
					),
				),
			),
		},
	} {
		// Use a name that does not shadow the imported lexer package.
		lexr := &stubLexer{tokens: test.tokens}
		result, err := Parse(lexr)
		if err != nil {
			t.Errorf("[%d] unexpected error: %s", id, err)
			continue
		}
		if !reflect.DeepEqual(test.tree, result) {
			t.Errorf("[%d] Parse():\nact:\t%s\nexp:\t%s\n", id, result, test.tree)
		}
	}
}