Merge pull request #17 from shawnps/master

gofmt -s
This commit is contained in:
Sergey Kamardin 2017-02-12 23:01:51 +03:00 committed by GitHub
commit 51eb1ee00b
7 changed files with 158 additions and 158 deletions

View File

@@ -38,7 +38,7 @@ func (self Contains) Index(s string) (int, []int) {
} }
segments := acquireSegments(len(s) + 1) segments := acquireSegments(len(s) + 1)
for i, _ := range s { for i := range s {
segments = append(segments, offset+i) segments = append(segments, offset+i)
} }

View File

@@ -17,15 +17,15 @@ func TestAppendMerge(t *testing.T) {
}{ }{
{ {
[2][]int{ [2][]int{
[]int{0, 6, 7}, {0, 6, 7},
[]int{0, 1, 3}, {0, 1, 3},
}, },
[]int{0, 1, 3, 6, 7}, []int{0, 1, 3, 6, 7},
}, },
{ {
[2][]int{ [2][]int{
[]int{0, 1, 3, 6, 7}, {0, 1, 3, 6, 7},
[]int{0, 1, 10}, {0, 1, 10},
}, },
[]int{0, 1, 3, 6, 7, 10}, []int{0, 1, 3, 6, 7, 10},
}, },

View File

@@ -15,7 +15,7 @@ func NewMax(l int) Max {
func (self Max) Match(s string) bool { func (self Max) Match(s string) bool {
var l int var l int
for _ = range s { for range s {
l += 1 l += 1
if l > self.Limit { if l > self.Limit {
return false return false

View File

@@ -15,7 +15,7 @@ func NewMin(l int) Min {
func (self Min) Match(s string) bool { func (self Min) Match(s string) bool {
var l int var l int
for _ = range s { for range s {
l += 1 l += 1
if l >= self.Limit { if l >= self.Limit {
return true return true

View File

@@ -43,7 +43,7 @@ func (self Row) matchAll(s string) bool {
func (self Row) lenOk(s string) bool { func (self Row) lenOk(s string) bool {
var i int var i int
for _ = range s { for range s {
i++ i++
if i > self.RunesLength { if i > self.RunesLength {
return false return false

View File

@@ -28,8 +28,8 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "abc", //pattern: "abc",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.Text, "abc"}, {lexer.Text, "abc"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindText, Text{Text: "abc"}), NewNode(KindText, Text{Text: "abc"}),
@@ -38,10 +38,10 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "a*c", //pattern: "a*c",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.Text, "a"}, {lexer.Text, "a"},
lexer.Token{lexer.Any, "*"}, {lexer.Any, "*"},
lexer.Token{lexer.Text, "c"}, {lexer.Text, "c"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindText, Text{Text: "a"}), NewNode(KindText, Text{Text: "a"}),
@@ -52,10 +52,10 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "a**c", //pattern: "a**c",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.Text, "a"}, {lexer.Text, "a"},
lexer.Token{lexer.Super, "**"}, {lexer.Super, "**"},
lexer.Token{lexer.Text, "c"}, {lexer.Text, "c"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindText, Text{Text: "a"}), NewNode(KindText, Text{Text: "a"}),
@@ -66,10 +66,10 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "a?c", //pattern: "a?c",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.Text, "a"}, {lexer.Text, "a"},
lexer.Token{lexer.Single, "?"}, {lexer.Single, "?"},
lexer.Token{lexer.Text, "c"}, {lexer.Text, "c"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindText, Text{Text: "a"}), NewNode(KindText, Text{Text: "a"}),
@@ -80,13 +80,13 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "[!a-z]", //pattern: "[!a-z]",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.RangeOpen, "["}, {lexer.RangeOpen, "["},
lexer.Token{lexer.Not, "!"}, {lexer.Not, "!"},
lexer.Token{lexer.RangeLo, "a"}, {lexer.RangeLo, "a"},
lexer.Token{lexer.RangeBetween, "-"}, {lexer.RangeBetween, "-"},
lexer.Token{lexer.RangeHi, "z"}, {lexer.RangeHi, "z"},
lexer.Token{lexer.RangeClose, "]"}, {lexer.RangeClose, "]"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindRange, Range{Lo: 'a', Hi: 'z', Not: true}), NewNode(KindRange, Range{Lo: 'a', Hi: 'z', Not: true}),
@@ -95,10 +95,10 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "[az]", //pattern: "[az]",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.RangeOpen, "["}, {lexer.RangeOpen, "["},
lexer.Token{lexer.Text, "az"}, {lexer.Text, "az"},
lexer.Token{lexer.RangeClose, "]"}, {lexer.RangeClose, "]"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindList, List{Chars: "az"}), NewNode(KindList, List{Chars: "az"}),
@@ -107,12 +107,12 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "{a,z}", //pattern: "{a,z}",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.TermsOpen, "{"}, {lexer.TermsOpen, "{"},
lexer.Token{lexer.Text, "a"}, {lexer.Text, "a"},
lexer.Token{lexer.Separator, ","}, {lexer.Separator, ","},
lexer.Token{lexer.Text, "z"}, {lexer.Text, "z"},
lexer.Token{lexer.TermsClose, "}"}, {lexer.TermsClose, "}"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindAnyOf, nil, NewNode(KindAnyOf, nil,
@@ -128,14 +128,14 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "/{z,ab}*", //pattern: "/{z,ab}*",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.Text, "/"}, {lexer.Text, "/"},
lexer.Token{lexer.TermsOpen, "{"}, {lexer.TermsOpen, "{"},
lexer.Token{lexer.Text, "z"}, {lexer.Text, "z"},
lexer.Token{lexer.Separator, ","}, {lexer.Separator, ","},
lexer.Token{lexer.Text, "ab"}, {lexer.Text, "ab"},
lexer.Token{lexer.TermsClose, "}"}, {lexer.TermsClose, "}"},
lexer.Token{lexer.Any, "*"}, {lexer.Any, "*"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindText, Text{Text: "/"}), NewNode(KindText, Text{Text: "/"}),
@@ -153,29 +153,29 @@ func TestParseString(t *testing.T) {
{ {
//pattern: "{a,{x,y},?,[a-z],[!qwe]}", //pattern: "{a,{x,y},?,[a-z],[!qwe]}",
tokens: []lexer.Token{ tokens: []lexer.Token{
lexer.Token{lexer.TermsOpen, "{"}, {lexer.TermsOpen, "{"},
lexer.Token{lexer.Text, "a"}, {lexer.Text, "a"},
lexer.Token{lexer.Separator, ","}, {lexer.Separator, ","},
lexer.Token{lexer.TermsOpen, "{"}, {lexer.TermsOpen, "{"},
lexer.Token{lexer.Text, "x"}, {lexer.Text, "x"},
lexer.Token{lexer.Separator, ","}, {lexer.Separator, ","},
lexer.Token{lexer.Text, "y"}, {lexer.Text, "y"},
lexer.Token{lexer.TermsClose, "}"}, {lexer.TermsClose, "}"},
lexer.Token{lexer.Separator, ","}, {lexer.Separator, ","},
lexer.Token{lexer.Single, "?"}, {lexer.Single, "?"},
lexer.Token{lexer.Separator, ","}, {lexer.Separator, ","},
lexer.Token{lexer.RangeOpen, "["}, {lexer.RangeOpen, "["},
lexer.Token{lexer.RangeLo, "a"}, {lexer.RangeLo, "a"},
lexer.Token{lexer.RangeBetween, "-"}, {lexer.RangeBetween, "-"},
lexer.Token{lexer.RangeHi, "z"}, {lexer.RangeHi, "z"},
lexer.Token{lexer.RangeClose, "]"}, {lexer.RangeClose, "]"},
lexer.Token{lexer.Separator, ","}, {lexer.Separator, ","},
lexer.Token{lexer.RangeOpen, "["}, {lexer.RangeOpen, "["},
lexer.Token{lexer.Not, "!"}, {lexer.Not, "!"},
lexer.Token{lexer.Text, "qwe"}, {lexer.Text, "qwe"},
lexer.Token{lexer.RangeClose, "]"}, {lexer.RangeClose, "]"},
lexer.Token{lexer.TermsClose, "}"}, {lexer.TermsClose, "}"},
lexer.Token{lexer.EOF, ""}, {lexer.EOF, ""},
}, },
tree: NewNode(KindPattern, nil, tree: NewNode(KindPattern, nil,
NewNode(KindAnyOf, nil, NewNode(KindAnyOf, nil,

View File

@@ -12,169 +12,169 @@ func TestLexGood(t *testing.T) {
{ {
pattern: "", pattern: "",
items: []Token{ items: []Token{
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "hello", pattern: "hello",
items: []Token{ items: []Token{
Token{Text, "hello"}, {Text, "hello"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "/{rate,[0-9]]}*", pattern: "/{rate,[0-9]]}*",
items: []Token{ items: []Token{
Token{Text, "/"}, {Text, "/"},
Token{TermsOpen, "{"}, {TermsOpen, "{"},
Token{Text, "rate"}, {Text, "rate"},
Token{Separator, ","}, {Separator, ","},
Token{RangeOpen, "["}, {RangeOpen, "["},
Token{RangeLo, "0"}, {RangeLo, "0"},
Token{RangeBetween, "-"}, {RangeBetween, "-"},
Token{RangeHi, "9"}, {RangeHi, "9"},
Token{RangeClose, "]"}, {RangeClose, "]"},
Token{Text, "]"}, {Text, "]"},
Token{TermsClose, "}"}, {TermsClose, "}"},
Token{Any, "*"}, {Any, "*"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "hello,world", pattern: "hello,world",
items: []Token{ items: []Token{
Token{Text, "hello,world"}, {Text, "hello,world"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "hello\\,world", pattern: "hello\\,world",
items: []Token{ items: []Token{
Token{Text, "hello,world"}, {Text, "hello,world"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "hello\\{world", pattern: "hello\\{world",
items: []Token{ items: []Token{
Token{Text, "hello{world"}, {Text, "hello{world"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "hello?", pattern: "hello?",
items: []Token{ items: []Token{
Token{Text, "hello"}, {Text, "hello"},
Token{Single, "?"}, {Single, "?"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "hellof*", pattern: "hellof*",
items: []Token{ items: []Token{
Token{Text, "hellof"}, {Text, "hellof"},
Token{Any, "*"}, {Any, "*"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "hello**", pattern: "hello**",
items: []Token{ items: []Token{
Token{Text, "hello"}, {Text, "hello"},
Token{Super, "**"}, {Super, "**"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "[日-語]", pattern: "[日-語]",
items: []Token{ items: []Token{
Token{RangeOpen, "["}, {RangeOpen, "["},
Token{RangeLo, "日"}, {RangeLo, "日"},
Token{RangeBetween, "-"}, {RangeBetween, "-"},
Token{RangeHi, "語"}, {RangeHi, "語"},
Token{RangeClose, "]"}, {RangeClose, "]"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "[!日-語]", pattern: "[!日-語]",
items: []Token{ items: []Token{
Token{RangeOpen, "["}, {RangeOpen, "["},
Token{Not, "!"}, {Not, "!"},
Token{RangeLo, "日"}, {RangeLo, "日"},
Token{RangeBetween, "-"}, {RangeBetween, "-"},
Token{RangeHi, "語"}, {RangeHi, "語"},
Token{RangeClose, "]"}, {RangeClose, "]"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "[日本語]", pattern: "[日本語]",
items: []Token{ items: []Token{
Token{RangeOpen, "["}, {RangeOpen, "["},
Token{Text, "日本語"}, {Text, "日本語"},
Token{RangeClose, "]"}, {RangeClose, "]"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "[!日本語]", pattern: "[!日本語]",
items: []Token{ items: []Token{
Token{RangeOpen, "["}, {RangeOpen, "["},
Token{Not, "!"}, {Not, "!"},
Token{Text, "日本語"}, {Text, "日本語"},
Token{RangeClose, "]"}, {RangeClose, "]"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "{a,b}", pattern: "{a,b}",
items: []Token{ items: []Token{
Token{TermsOpen, "{"}, {TermsOpen, "{"},
Token{Text, "a"}, {Text, "a"},
Token{Separator, ","}, {Separator, ","},
Token{Text, "b"}, {Text, "b"},
Token{TermsClose, "}"}, {TermsClose, "}"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "/{z,ab}*", pattern: "/{z,ab}*",
items: []Token{ items: []Token{
Token{Text, "/"}, {Text, "/"},
Token{TermsOpen, "{"}, {TermsOpen, "{"},
Token{Text, "z"}, {Text, "z"},
Token{Separator, ","}, {Separator, ","},
Token{Text, "ab"}, {Text, "ab"},
Token{TermsClose, "}"}, {TermsClose, "}"},
Token{Any, "*"}, {Any, "*"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
{ {
pattern: "{[!日-語],*,?,{a,b,\\c}}", pattern: "{[!日-語],*,?,{a,b,\\c}}",
items: []Token{ items: []Token{
Token{TermsOpen, "{"}, {TermsOpen, "{"},
Token{RangeOpen, "["}, {RangeOpen, "["},
Token{Not, "!"}, {Not, "!"},
Token{RangeLo, "日"}, {RangeLo, "日"},
Token{RangeBetween, "-"}, {RangeBetween, "-"},
Token{RangeHi, "語"}, {RangeHi, "語"},
Token{RangeClose, "]"}, {RangeClose, "]"},
Token{Separator, ","}, {Separator, ","},
Token{Any, "*"}, {Any, "*"},
Token{Separator, ","}, {Separator, ","},
Token{Single, "?"}, {Single, "?"},
Token{Separator, ","}, {Separator, ","},
Token{TermsOpen, "{"}, {TermsOpen, "{"},
Token{Text, "a"}, {Text, "a"},
Token{Separator, ","}, {Separator, ","},
Token{Text, "b"}, {Text, "b"},
Token{Separator, ","}, {Separator, ","},
Token{Text, "c"}, {Text, "c"},
Token{TermsClose, "}"}, {TermsClose, "}"},
Token{TermsClose, "}"}, {TermsClose, "}"},
Token{EOF, ""}, {EOF, ""},
}, },
}, },
} { } {