glob/syntax/ast/parser_test.go

package ast

import (
	"reflect"
	"testing"

	"git.internal/re/glob/syntax/lexer"
)
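
// stubLexer replays a fixed slice of tokens and reports EOF once the slice is
// exhausted, standing in for the real lexer in these tests.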
type stubLexer struct {
	tokens []lexer.Token
	pos    int
}

func (s *stubLexer) Next() (ret lexer.Token) {
	if s.pos == len(s.tokens) {
		return lexer.Token{lexer.EOF, ""}
	}
	ret = s.tokens[s.pos]
	s.pos++
	return
}
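
// TestParseString runs a table of canned token streams through Parse and
// compares the resulting AST against a hand-built expected tree.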
func TestParseString(t *testing.T) {
	for id, test := range []struct {
		tokens []lexer.Token
		tree   *Node
	}{
		{
			// pattern: "abc"
			tokens: []lexer.Token{
				{lexer.Text, "abc"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "abc"}),
			),
		},
		{
			// pattern: "a*c"
			tokens: []lexer.Token{
				{lexer.Text, "a"},
				{lexer.Any, "*"},
				{lexer.Text, "c"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "a"}),
				NewNode(KindAny, nil),
				NewNode(KindText, Text{Text: "c"}),
			),
		},
		{
			// pattern: "a**c"
			tokens: []lexer.Token{
				{lexer.Text, "a"},
				{lexer.Super, "**"},
				{lexer.Text, "c"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "a"}),
				NewNode(KindSuper, nil),
				NewNode(KindText, Text{Text: "c"}),
			),
		},
		{
			// pattern: "a?c"
			tokens: []lexer.Token{
				{lexer.Text, "a"},
				{lexer.Single, "?"},
				{lexer.Text, "c"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "a"}),
				NewNode(KindSingle, nil),
				NewNode(KindText, Text{Text: "c"}),
			),
		},
		{
			// pattern: "[!a-z]"
			tokens: []lexer.Token{
				{lexer.RangeOpen, "["},
				{lexer.Not, "!"},
				{lexer.RangeLo, "a"},
				{lexer.RangeBetween, "-"},
				{lexer.RangeHi, "z"},
				{lexer.RangeClose, "]"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindRange, Range{Lo: 'a', Hi: 'z', Not: true}),
			),
		},
		{
			// pattern: "[az]"
			tokens: []lexer.Token{
				{lexer.RangeOpen, "["},
				{lexer.Text, "az"},
				{lexer.RangeClose, "]"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindList, List{Chars: "az"}),
			),
		},
		{
			// pattern: "{a,z}"
			tokens: []lexer.Token{
				{lexer.TermsOpen, "{"},
				{lexer.Text, "a"},
				{lexer.Separator, ","},
				{lexer.Text, "z"},
				{lexer.TermsClose, "}"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindAnyOf, nil,
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "a"}),
					),
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "z"}),
					),
				),
			),
		},
		{
			// pattern: "/{z,ab}*"
			tokens: []lexer.Token{
				{lexer.Text, "/"},
				{lexer.TermsOpen, "{"},
				{lexer.Text, "z"},
				{lexer.Separator, ","},
				{lexer.Text, "ab"},
				{lexer.TermsClose, "}"},
				{lexer.Any, "*"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindText, Text{Text: "/"}),
				NewNode(KindAnyOf, nil,
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "z"}),
					),
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "ab"}),
					),
				),
				NewNode(KindAny, nil),
			),
		},
		{
			// pattern: "{a,{x,y},?,[a-z],[!qwe]}"
			tokens: []lexer.Token{
				{lexer.TermsOpen, "{"},
				{lexer.Text, "a"},
				{lexer.Separator, ","},
				{lexer.TermsOpen, "{"},
				{lexer.Text, "x"},
				{lexer.Separator, ","},
				{lexer.Text, "y"},
				{lexer.TermsClose, "}"},
				{lexer.Separator, ","},
				{lexer.Single, "?"},
				{lexer.Separator, ","},
				{lexer.RangeOpen, "["},
				{lexer.RangeLo, "a"},
				{lexer.RangeBetween, "-"},
				{lexer.RangeHi, "z"},
				{lexer.RangeClose, "]"},
				{lexer.Separator, ","},
				{lexer.RangeOpen, "["},
				{lexer.Not, "!"},
				{lexer.Text, "qwe"},
				{lexer.RangeClose, "]"},
				{lexer.TermsClose, "}"},
				{lexer.EOF, ""},
			},
			tree: NewNode(KindPattern, nil,
				NewNode(KindAnyOf, nil,
					NewNode(KindPattern, nil,
						NewNode(KindText, Text{Text: "a"}),
					),
					NewNode(KindPattern, nil,
						NewNode(KindAnyOf, nil,
							NewNode(KindPattern, nil,
								NewNode(KindText, Text{Text: "x"}),
							),
							NewNode(KindPattern, nil,
								NewNode(KindText, Text{Text: "y"}),
							),
						),
					),
					NewNode(KindPattern, nil,
						NewNode(KindSingle, nil),
					),
					NewNode(KindPattern, nil,
						NewNode(KindRange, Range{Lo: 'a', Hi: 'z', Not: false}),
					),
					NewNode(KindPattern, nil,
						NewNode(KindList, List{Chars: "qwe", Not: true}),
					),
				),
			),
		},
	} {
		// Drive the parser with the stubbed token stream.
		lexer := &stubLexer{tokens: test.tokens}
		result, err := Parse(lexer)
		if err != nil {
			t.Errorf("[%d] unexpected error: %s", id, err)
		}
		// The parsed tree must match the expected one, node for node.
		if !reflect.DeepEqual(test.tree, result) {
			t.Errorf("[%d] Parse():\nact:\t%s\nexp:\t%s\n", id, result, test.tree)
		}
	}
}